From bdea4b6900b51c85d4ffb9511ba6a04a3b42b91e Mon Sep 17 00:00:00 2001 From: Michael Buckley Date: Thu, 4 Apr 2024 19:53:35 -0400 Subject: [PATCH 01/26] Merge history table prefetch with resource table. (#5825) --- .../7_2_0/5824-fix-history-prefetch.yaml | 5 + .../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 229 +++++++++--------- .../bulk/imprt2/ConsumeFilesStepR4Test.java | 7 +- .../r4/FhirResourceDaoR4QueryCountTest.java | 32 +-- .../jpa/dao/r4/PartitioningSqlR4Test.java | 8 +- .../uhn/fhir/jpa/reindex/ReindexStepTest.java | 10 +- .../r5/FhirSystemDaoTransactionR5Test.java | 7 +- ...iftedRefchainsAndChainedSortingR5Test.java | 6 +- 8 files changed, 150 insertions(+), 154 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml new file mode 100644 index 00000000000..f1a9dd7f3be --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5824 +jira: SMILE-7999 +title: "We now avoid a query during reindex and transaction processing that was very slow on Sql Server." 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index 951045de041..e1f7749801a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; -import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; @@ -38,7 +37,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory; -import ca.uhn.fhir.jpa.search.builder.SearchBuilder; +import ca.uhn.fhir.jpa.search.SearchConstants; import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.rest.api.server.IBundleProvider; @@ -47,34 +46,31 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.util.StopWatch; import com.google.common.annotations.VisibleForTesting; +import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContextType; +import jakarta.persistence.Query; import jakarta.persistence.TypedQuery; -import jakarta.persistence.criteria.CriteriaBuilder; -import jakarta.persistence.criteria.CriteriaQuery; -import 
jakarta.persistence.criteria.JoinType; -import jakarta.persistence.criteria.Predicate; -import jakarta.persistence.criteria.Root; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; public abstract class BaseHapiFhirSystemDao extends BaseStorageDao implements IFhirSystemDao { - - public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0]; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); + public ResourceCountCache myResourceCountsCache; @PersistenceContext(type = PersistenceContextType.TRANSACTION) @@ -95,9 +91,6 @@ public abstract class BaseHapiFhirSystemDao extends B @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; - @Autowired - private IResourceTagDao myResourceTagDao; - @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -181,13 +174,25 @@ public abstract class BaseHapiFhirSystemDao extends B return myTransactionProcessor.transaction(theRequestDetails, theRequest, true); } + /** + * Prefetch entities into the Hibernate session. + * + * When processing several resources (e.g. transaction bundle, $reindex chunk, etc.) + * it would be slow to fetch each piece of a resource (e.g. all token index rows) + * one resource at a time. + * Instead, we fetch all the linked resources for the entire batch and populate the Hibernate Session. 
+ * + * @param theResolvedIds the pids + * @param thePreFetchIndexes Should resource indexes be loaded + */ + @SuppressWarnings("rawtypes") @Override public

void preFetchResources( List

theResolvedIds, boolean thePreFetchIndexes) { HapiTransactionService.requireTransaction(); List pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList()); - new QueryChunker().chunk(pids, ids -> { + new QueryChunker().chunk(pids, idChunk -> { /* * Pre-fetch the resources we're touching in this transaction in mass - this reduced the @@ -200,122 +205,110 @@ public abstract class BaseHapiFhirSystemDao extends B * * However, for realistic average workloads, this should reduce the number of round trips. */ - if (ids.size() >= 2) { - List loadedResourceTableEntries = new ArrayList<>(); - - new QueryChunker() - .chunk( - ids, - nextChunk -> - loadedResourceTableEntries.addAll(myResourceTableDao.findAllById(nextChunk))); - - List entityIds; + if (idChunk.size() >= 2) { + List entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk); if (thePreFetchIndexes) { - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsStringPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "string", "myParamsString", null); - } - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsTokenPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "token", "myParamsToken", null); - } + prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk); + prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk); + prefetchByField("date", "myParamsDate", ResourceTable::isParamsDatePopulated, entityChunk); + prefetchByField( + "quantity", "myParamsQuantity", ResourceTable::isParamsQuantityPopulated, entityChunk); + prefetchByField("resourceLinks", "myResourceLinks", ResourceTable::isHasLinks, entityChunk); - entityIds = loadedResourceTableEntries.stream() - 
.filter(ResourceTable::isParamsDatePopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "date", "myParamsDate", null); - } + prefetchByJoinClause( + "tags", + // fetch the TagResources and the actual TagDefinitions + "LEFT JOIN FETCH r.myTags t LEFT JOIN FETCH t.myTag", + BaseHasResource::isHasTags, + entityChunk); - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsQuantityPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null); - } - - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isHasLinks) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null); - } - - entityIds = loadedResourceTableEntries.stream() - .filter(BaseHasResource::isHasTags) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - myResourceTagDao.findByResourceIds(entityIds); - preFetchIndexes(entityIds, "tags", "myTags", null); - } - - entityIds = loadedResourceTableEntries.stream() - .map(ResourceTable::getId) - .collect(Collectors.toList()); if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) { - preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null); + prefetchByField("searchParamPresence", "mySearchParamPresents", r -> true, entityChunk); } } - - new QueryChunker() - .chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> { - Map entities = - entries.stream().collect(Collectors.toMap(ResourceTable::getId, t -> t)); - - CriteriaBuilder b = myEntityManager.getCriteriaBuilder(); - CriteriaQuery q = b.createQuery(ResourceHistoryTable.class); - Root from = q.from(ResourceHistoryTable.class); - - 
from.fetch("myProvenance", JoinType.LEFT); - - List orPredicates = new ArrayList<>(); - for (ResourceTable next : entries) { - Predicate resId = b.equal(from.get("myResourceId"), next.getId()); - Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion()); - orPredicates.add(b.and(resId, resVer)); - } - q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY))); - List resultList = - myEntityManager.createQuery(q).getResultList(); - for (ResourceHistoryTable next : resultList) { - ResourceTable nextEntity = entities.get(next.getResourceId()); - if (nextEntity != null) { - nextEntity.setCurrentVersionEntity(next); - } - } - }); } }); } - private void preFetchIndexes( - List theIds, - String typeDesc, - String fieldName, - @Nullable List theEntityListToPopulate) { - new QueryChunker().chunk(theIds, ids -> { - TypedQuery query = myEntityManager.createQuery( - "FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", - ResourceTable.class); - query.setParameter("IDS", ids); - List indexFetchOutcome = query.getResultList(); - ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc); - if (theEntityListToPopulate != null) { - theEntityListToPopulate.addAll(indexFetchOutcome); - } - }); + @Nonnull + private List prefetchResourceTableHistoryAndProvenance(List idChunk) { + assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked"; + + Query query = myEntityManager.createQuery("select r, h " + + " FROM ResourceTable r " + + " LEFT JOIN fetch ResourceHistoryTable h " + + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId " + + " left join fetch h.myProvenance " + + " WHERE r.myId IN ( :IDS ) "); + query.setParameter("IDS", idChunk); + + @SuppressWarnings("unchecked") + Stream queryResultStream = query.getResultStream(); + return queryResultStream + .map(nextPair -> { + // Store the matching ResourceHistoryTable in the transient slot on ResourceTable + ResourceTable result = 
(ResourceTable) nextPair[0]; + ResourceHistoryTable currentVersion = (ResourceHistoryTable) nextPair[1]; + result.setCurrentVersionEntity(currentVersion); + return result; + }) + .collect(Collectors.toList()); + } + + /** + * Prefetch a join field for the active subset of some ResourceTable entities. + * Convenience wrapper around prefetchByJoinClause() for simple fields. + * + * @param theDescription for logging + * @param theJpaFieldName the join field from ResourceTable + * @param theEntityPredicate select which ResourceTable entities need this join + * @param theEntities the ResourceTable entities to consider + */ + private void prefetchByField( + String theDescription, + String theJpaFieldName, + Predicate theEntityPredicate, + List theEntities) { + + String joinClause = "LEFT JOIN FETCH r." + theJpaFieldName; + + prefetchByJoinClause(theDescription, joinClause, theEntityPredicate, theEntities); + } + + /** + * Prefetch a join field for the active subset of some ResourceTable entities. + * + * @param theDescription for logging + * @param theJoinClause the JPA join expression to add to `ResourceTable r` + * @param theEntityPredicate selects which entities need this prefetch + * @param theEntities the ResourceTable entities to consider + */ + private void prefetchByJoinClause( + String theDescription, + String theJoinClause, + Predicate theEntityPredicate, + List theEntities) { + + // Which entities need this prefetch? 
+ List idSubset = theEntities.stream() + .filter(theEntityPredicate) + .map(ResourceTable::getId) + .collect(Collectors.toList()); + + if (idSubset.isEmpty()) { + // nothing to do + return; + } + + String jqlQuery = "FROM ResourceTable r " + theJoinClause + " WHERE r.myId IN ( :IDS )"; + + TypedQuery query = myEntityManager.createQuery(jqlQuery, ResourceTable.class); + query.setParameter("IDS", idSubset); + List indexFetchOutcome = query.getResultList(); + + ourLog.debug("Pre-fetched {} {} indexes", indexFetchOutcome.size(), theDescription); } @Nullable diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java index ee86c0596df..2076cf970fa 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java @@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.bulk.imprt2; import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.r4.BasePartitioningR4Test; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.IdType; @@ -84,7 +83,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test { // Validate - assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread(), myCaptureQueriesListener.getInsertQueriesForCurrentThread().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n"))); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); 
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -145,9 +144,9 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test { // Validate if (partitionEnabled) { - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); - } else { assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + } else { + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); } assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index 1f299d970ef..abba1fbe50c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -149,7 +149,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; @Autowired - private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc;; + private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc; @Autowired private ReindexStep myReindexStep; @Autowired @@ -827,7 +827,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?active=true", new SystemRequestDetails()); // Validate - assertEquals(13, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(12, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(10, 
myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(30, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -1026,10 +1026,10 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @ParameterizedTest @CsvSource({ // OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate - " false, CURRENT_VERSION, 2, 0", - " true, CURRENT_VERSION, 12, 0", - " false, ALL_VERSIONS, 12, 0", - " true, ALL_VERSIONS, 22, 0", + " false, CURRENT_VERSION, 1, 0", + " true, CURRENT_VERSION, 11, 0", + " false, ALL_VERSIONS, 11, 0", + " true, ALL_VERSIONS, 21, 0", }) public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { // Setup @@ -1841,7 +1841,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(2, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -1857,7 +1857,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(2, myCaptureQueriesListener.countInsertQueries()); 
myCaptureQueriesListener.logUpdateQueries(); @@ -1927,7 +1927,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(9, myCaptureQueriesListener.countSelectQueries()); + assertEquals(7, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(7, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -1943,7 +1943,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(7, myCaptureQueriesListener.countSelectQueries()); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(5, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2239,7 +2239,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(9, myCaptureQueriesListener.countSelectQueries()); + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2256,7 +2256,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); 
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + assertEquals(7, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2271,7 +2271,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(6, myCaptureQueriesListener.countSelectQueries()); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -3365,7 +3365,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); @@ -3388,7 +3388,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, 
myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); @@ -3449,7 +3449,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle-smallchanges.json")); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index 3899c13939f..07c474b0ff0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -2842,7 +2842,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); 
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2859,7 +2859,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2874,7 +2874,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2924,7 +2924,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { output = mySystemDao.transaction(requestDetails, input); myCaptureQueriesListener.logSelectQueries(); - assertEquals(29, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(26, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, 
myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java index 81ad6c198c7..c619cc0b410 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java @@ -64,7 +64,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); @@ -95,7 +95,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(7, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -128,7 +128,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); // name, family, phonetic, deceased, active assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); @@ -196,7 +196,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify 
assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(10, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -241,7 +241,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(4, outcome.getRecordsProcessed()); - assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(2, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java index f5d7992b1fc..2fe8d2762ca 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.util.BundleBuilder; +import jakarta.annotation.Nonnull; import org.hl7.fhir.r5.model.BooleanType; import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.CodeType; @@ -20,7 +21,6 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.ValueSource; -import jakarta.annotation.Nonnull; import java.io.IOException; import java.util.UUID; @@ -28,7 +28,6 @@ import static 
org.apache.commons.lang3.StringUtils.countMatches; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesPattern; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -151,7 +150,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // Verify - assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(theMatchUrlCacheEnabled ? 3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -203,7 +202,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // Verify - assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(theMatchUrlCacheEnabled ? 
3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java index 453b541eebb..5cf0e2a4127 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java @@ -401,7 +401,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(10, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -441,7 +441,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionCreateEntry(newPatientP1_HomerSimpson().setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(newPatientP2_MargeSimpson().setId(p2Id)).conditional("identifier=http://system|300"); - ; + Bundle requestBundle = bb.getBundleTyped(); myCaptureQueriesListener.clear(); @@ -496,7 +496,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionCreateEntry(new Patient().addIdentifier(new 
Identifier().setSystem("http://system").setValue("200")).setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("300")).setId(p2Id)).conditional("identifier=http://system|300"); - ; + Bundle requestBundle = bb.getBundleTyped(); myCaptureQueriesListener.clear(); From 3a5ff47c586cd9467e4ce2c2c2e3efdfacf73eca Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Thu, 4 Apr 2024 22:24:05 -0300 Subject: [PATCH 02/26] Add CapabilityStatementFilterFactory to register filters that can modify the capability statement (#5814) * added new customizer filter factory. have not yet deleted old code it replaces. * added new customizer filter factory. have not yet deleted old code it replaces. * replaced websocket filter. works. still some cleanup to do * replaced websocket filter. works. still some cleanup to do * cosmetic change * add coverage and fix bugs it found * spotless * move capability statement classes * add changelog and rename new classes * review feedback --- .../7_2_0/5814-resource-provider-factory.yaml | 5 + .../fhir/mdm/provider/MdmProviderLoader.java | 28 +++-- .../IObservableSupplierSetObserver.java | 14 +++ .../IResourceProviderFactoryObserver.java | 13 +-- .../provider/ObservableSupplierSet.java | 104 ++++++++++++++++++ .../provider/ResourceProviderFactory.java | 43 ++------ .../provider/ObservableSupplierSetTest.java | 102 +++++++++++++++++ 7 files changed, 252 insertions(+), 57 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5814-resource-provider-factory.yaml create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java create mode 100644 
hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSetTest.java diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5814-resource-provider-factory.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5814-resource-provider-factory.yaml new file mode 100644 index 00000000000..9667dc0e6e2 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5814-resource-provider-factory.yaml @@ -0,0 +1,5 @@ +--- +type: change +issue: 5814 +title: "Extracted methods out of ResourceProviderFactory into ObservableSupplierSet so that functionality can be used by +other services. Unit tests revealed a cleanup bug in MdmProviderLoader that is fixed in this MR." diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java index aa1d32e72a3..8c64b2b898d 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java @@ -32,6 +32,8 @@ import jakarta.annotation.PreDestroy; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import java.util.function.Supplier; + @Service public class MdmProviderLoader { @Autowired @@ -58,26 +60,28 @@ public class MdmProviderLoader { @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; - private BaseMdmProvider myMdmProvider; - private MdmLinkHistoryProviderDstu3Plus myMdmHistoryProvider; + private Supplier myMdmProviderSupplier; + private Supplier myMdmHistoryProviderSupplier; public void loadProvider() { switch (myFhirContext.getVersion().getVersion()) { case DSTU3: case R4: case R5: - myResourceProviderFactory.addSupplier(() -> new MdmProviderDstu3Plus( + // We store the supplier so that removeSupplier works properly + 
myMdmProviderSupplier = () -> new MdmProviderDstu3Plus( myFhirContext, myMdmControllerSvc, myMdmControllerHelper, myMdmSubmitSvc, myInterceptorBroadcaster, - myMdmSettings)); + myMdmSettings); + // We store the supplier so that removeSupplier works properly + myResourceProviderFactory.addSupplier(myMdmProviderSupplier); if (myStorageSettings.isNonResourceDbHistoryEnabled()) { - myResourceProviderFactory.addSupplier(() -> { - return new MdmLinkHistoryProviderDstu3Plus( - myFhirContext, myMdmControllerSvc, myInterceptorBroadcaster); - }); + myMdmHistoryProviderSupplier = () -> new MdmLinkHistoryProviderDstu3Plus( + myFhirContext, myMdmControllerSvc, myInterceptorBroadcaster); + myResourceProviderFactory.addSupplier(myMdmHistoryProviderSupplier); } break; default: @@ -88,11 +92,11 @@ public class MdmProviderLoader { @PreDestroy public void unloadProvider() { - if (myMdmProvider != null) { - myResourceProviderFactory.removeSupplier(() -> myMdmProvider); + if (myMdmProviderSupplier != null) { + myResourceProviderFactory.removeSupplier(myMdmProviderSupplier); } - if (myMdmHistoryProvider != null) { - myResourceProviderFactory.removeSupplier(() -> myMdmHistoryProvider); + if (myMdmHistoryProviderSupplier != null) { + myResourceProviderFactory.removeSupplier(myMdmHistoryProviderSupplier); } } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java new file mode 100644 index 00000000000..ee835d1dfd7 --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java @@ -0,0 +1,14 @@ +package ca.uhn.fhir.rest.server.provider; + +import jakarta.annotation.Nonnull; + +import java.util.function.Supplier; + +/** + * See {@link ObservableSupplierSet} + */ +public interface IObservableSupplierSetObserver { + void update(@Nonnull Supplier theSupplier); + + void 
remove(@Nonnull Supplier theSupplier); +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java index c8fba47fc8a..42b1d77be9a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java @@ -19,12 +19,7 @@ */ package ca.uhn.fhir.rest.server.provider; -import jakarta.annotation.Nonnull; - -import java.util.function.Supplier; - -public interface IResourceProviderFactoryObserver { - void update(@Nonnull Supplier theSupplier); - - void remove(@Nonnull Supplier theSupplier); -} +/** + * See {@link ObservableSupplierSet} + */ +public interface IResourceProviderFactoryObserver extends IObservableSupplierSetObserver {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java new file mode 100644 index 00000000000..d10e4b19438 --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java @@ -0,0 +1,104 @@ +package ca.uhn.fhir.rest.server.provider; + +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Supplier; + +/** + * This is a generic implementation of the Observer Design Pattern. + * We use this to pass sets of beans from exporting Spring application contexts to importing Spring application contexts. 
We defer + * resolving the observed beans via a Supplier to give the exporting context a chance to initialize the beans before they are used. + * @param the class of the Observer + *

+ * A typical usage pattern would be: + *

    + *
  1. Create {@link ObservableSupplierSet} in exporter context.
  2. + *
  3. Add all the suppliers in the exporter context.
  4. + *
  5. Attach the importer to the {@link ObservableSupplierSet}
  6. + *
  7. Importer calls {@link ObservableSupplierSet#getSupplierResults} and processes all the beans
  8. + *
  9. Some other service beans may add more suppliers later as a part of their initialization and the observer handlers will process them accordingly
  10. + *
  11. Those other service beans should call {@link ObservableSupplierSet#removeSupplier(Supplier)} in a @PreDestroy method so they are properly cleaned up if those services are shut down or restarted
  12. + *
+ * + */ +public class ObservableSupplierSet { + private static final Logger ourLog = LoggerFactory.getLogger(ObservableSupplierSet.class); + + private final Set myObservers = Collections.synchronizedSet(new HashSet<>()); + + private final Set> mySuppliers = new LinkedHashSet<>(); + + /** Add a supplier and notify all observers + * + * @param theSupplier supplies the object to be observed + */ + public void addSupplier(@Nonnull Supplier theSupplier) { + if (mySuppliers.add(theSupplier)) { + myObservers.forEach(observer -> observer.update(theSupplier)); + } + } + + /** Remove a supplier and notify all observers. CAUTION, you might think that this code would work, but it does not: + * + * observableSupplierSet.addSupplier(() -> myBean); + * ... + * observableSupplierSet.removeSupplier(() -> myBean); + * + * the removeSupplier in this example would fail because it is a different lambda instance from the first. Instead, + * you need to store the supplier between the add and remove: + * + * mySupplier = () -> myBean; + * observableSupplierSet.addSupplier(mySupplier); + * ... + * observableSupplierSet.removeSupplier(mySupplier); + * + * + * @param theSupplier the supplier to be removed + */ + public void removeSupplier(@Nonnull Supplier theSupplier) { + if (mySuppliers.remove(theSupplier)) { + myObservers.forEach(observer -> observer.remove(theSupplier)); + } else { + ourLog.warn("Failed to remove supplier", new RuntimeException()); + } + } + + /** + * Attach an observer to this observableSupplierSet. This observer will be notified every time a supplier is added or removed. + * @param theObserver the observer to be notified + */ + public void attach(T theObserver) { + myObservers.add(theObserver); + } + + /** + * Detach an observer from this observableSupplierSet, so it is no longer notified when suppliers are added and removed. 
+ * @param theObserver the observer to be removed + */ + public void detach(T theObserver) { + myObservers.remove(theObserver); + } + + /** + * + * @return a list of get() being called on all suppliers. + */ + protected List getSupplierResults() { + List retVal = new ArrayList<>(); + for (Supplier next : mySuppliers) { + Object nextRp = next.get(); + if (nextRp != null) { + retVal.add(nextRp); + } + } + return retVal; + } +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java index 809418c1241..dce86b45e30 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java @@ -19,45 +19,16 @@ */ package ca.uhn.fhir.rest.server.provider; -import jakarta.annotation.Nonnull; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; -import java.util.function.Supplier; -public class ResourceProviderFactory { - private Set myObservers = Collections.synchronizedSet(new HashSet<>()); - private List> mySuppliers = new ArrayList<>(); - - public void addSupplier(@Nonnull Supplier theSupplier) { - mySuppliers.add(theSupplier); - myObservers.forEach(observer -> observer.update(theSupplier)); - } - - public void removeSupplier(@Nonnull Supplier theSupplier) { - mySuppliers.remove(theSupplier); - myObservers.forEach(observer -> observer.remove(theSupplier)); - } +/** + * This Factory stores FHIR Resource Provider instance suppliers that will be registered on a FHIR Endpoint later. 
+ * See {@link ObservableSupplierSet} + */ +public class ResourceProviderFactory extends ObservableSupplierSet { + public ResourceProviderFactory() {} public List createProviders() { - List retVal = new ArrayList<>(); - for (Supplier next : mySuppliers) { - Object nextRp = next.get(); - if (nextRp != null) { - retVal.add(nextRp); - } - } - return retVal; - } - - public void attach(IResourceProviderFactoryObserver theObserver) { - myObservers.add(theObserver); - } - - public void detach(IResourceProviderFactoryObserver theObserver) { - myObservers.remove(theObserver); + return super.getSupplierResults(); } } diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSetTest.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSetTest.java new file mode 100644 index 00000000000..d13a161141c --- /dev/null +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSetTest.java @@ -0,0 +1,102 @@ +package ca.uhn.fhir.rest.server.provider; + +import ca.uhn.test.util.LogbackCaptureTestExtension; +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.jupiter.api.Assertions.*; + +class ObservableSupplierSetTest { + + private final ObservableSupplierSet myObservableSupplierSet = new ObservableSupplierSet<>(); + private final TestObserver myObserver = new TestObserver(); + private final AtomicInteger myCounter = new AtomicInteger(); + + @RegisterExtension + final 
LogbackCaptureTestExtension myLogger = new LogbackCaptureTestExtension((ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ObservableSupplierSet.class)); + + @BeforeEach + public void before() { + myObservableSupplierSet.attach(myObserver); + myObserver.assertCalls(0, 0); + } + + @Test + public void observersNotifiedByAddRemove() { + Supplier supplier = myCounter::incrementAndGet; + myObservableSupplierSet.addSupplier(supplier); + myObserver.assertCalls(1, 0); + assertEquals(0, myCounter.get()); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(1)); + assertEquals(1, myCounter.get()); + myObservableSupplierSet.removeSupplier(supplier); + myObserver.assertCalls(1, 1); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(0)); + } + + @Test + public void testRemoveWrongSupplier() { + myObservableSupplierSet.addSupplier(myCounter::incrementAndGet); + myObserver.assertCalls(1, 0); + assertEquals(0, myCounter.get()); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(1)); + assertEquals(1, myCounter.get()); + + // You might expect this to remove our supplier, but in fact it is a different lambda, so it fails and logs a stack trace + myObservableSupplierSet.removeSupplier(myCounter::incrementAndGet); + myObserver.assertCalls(1, 0); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(1)); + List events = myLogger.filterLoggingEventsWithMessageContaining("Failed to remove supplier"); + assertThat(events, hasSize(1)); + assertEquals(Level.WARN, events.get(0).getLevel()); + } + + @Test + public void testDetach() { + myObservableSupplierSet.addSupplier(myCounter::incrementAndGet); + myObserver.assertCalls(1, 0); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(1)); + + myObservableSupplierSet.addSupplier(myCounter::incrementAndGet); + myObserver.assertCalls(2, 0); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(2)); + + myObservableSupplierSet.detach(myObserver); + + // We 
now have a third supplier but the observer has been detached, so it was not notified of the third supplier + myObservableSupplierSet.addSupplier(myCounter::incrementAndGet); + myObserver.assertCalls(2, 0); + assertThat(myObservableSupplierSet.getSupplierResults(), hasSize(3)); + } + + private static class TestObserver implements IObservableSupplierSetObserver { + int updated = 0; + int removed = 0; + + @Override + public void update(@NotNull Supplier theSupplier) { + ++updated; + } + + @Override + public void remove(@NotNull Supplier theSupplier) { + ++removed; + } + + public void assertCalls(int theExpectedUpdated, int theExpectedRemoved) { + assertEquals(theExpectedUpdated, updated); + assertEquals(theExpectedRemoved, removed); + } + } +} From 620b46dd0ae915b9021a03ddca359d441b7a5c57 Mon Sep 17 00:00:00 2001 From: TipzCM Date: Fri, 5 Apr 2024 15:03:16 -0400 Subject: [PATCH 03/26] unify bulk export patient type and bulk export patient instance (#5822) * step one of unifying the bulk exports * adding changelog * review points * spotless --------- Co-authored-by: leif stawnyczy --- ...atient-type-and-instance-bulk-exports.yaml | 7 ++ .../jobs/export/BulkDataExportProvider.java | 90 +++++++++---------- 2 files changed, 49 insertions(+), 48 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5820-unifying-patient-type-and-instance-bulk-exports.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5820-unifying-patient-type-and-instance-bulk-exports.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5820-unifying-patient-type-and-instance-bulk-exports.yaml new file mode 100644 index 00000000000..fd97471bc53 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5820-unifying-patient-type-and-instance-bulk-exports.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 5820 +title: "Unifying the code paths for Patient type export and Patient instance 
export. + These paths should be the same, since type is defined by spec, but instance + is just 'syntactic sugar' on top of that spec (and so should be the same). +" diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java index 8882abc5c2e..05f5a95dbcc 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java @@ -37,7 +37,6 @@ import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; -import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; @@ -77,7 +76,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -293,11 +291,15 @@ public class BulkDataExportProvider { */ private void validateTargetsExists( RequestDetails theRequestDetails, String theTargetResourceName, Iterable theIdParams) { - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( - theRequestDetails, theTargetResourceName, theIdParams.iterator().next()); - SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId); - for (IIdType nextId : theIdParams) { - myDaoRegistry.getResourceDao(theTargetResourceName).read(nextId, requestDetails); + if (theIdParams.iterator().hasNext()) { + RequestPartitionId partitionId = 
myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequestDetails, + theTargetResourceName, + theIdParams.iterator().next()); + SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId); + for (IIdType nextId : theIdParams) { + myDaoRegistry.getResourceDao(theTargetResourceName).read(nextId, requestDetails); + } } } @@ -364,26 +366,18 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, ServletRequestDetails theRequestDetails) { - validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT); - if (thePatient != null) { - validateTargetsExists( - theRequestDetails, - "Patient", - thePatient.stream().map(s -> new IdDt(s.getValue())).collect(Collectors.toList())); - } + List> patientIds = thePatient != null ? thePatient : new ArrayList<>(); - BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions( + doPatientExport( + theRequestDetails, theOutputFormat, theType, theSince, - theTypeFilter, theExportIdentifier, - thePatient, - theTypePostFetchFilterUrl); - validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes()); - - startJob(theRequestDetails, BulkExportJobParameters); + theTypeFilter, + theTypePostFetchFilterUrl, + patientIds); } /** @@ -417,9 +411,34 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, ServletRequestDetails theRequestDetails) { + + // call the type-level export to ensure spec compliance + patientExport( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theTypePostFetchFilterUrl, + List.of(theIdParam), + theExportIdentifier, + theRequestDetails); + } + + private void doPatientExport( + ServletRequestDetails theRequestDetails, + IPrimitiveType 
theOutputFormat, + IPrimitiveType theType, + IPrimitiveType theSince, + IPrimitiveType theExportIdentifier, + List> theTypeFilter, + List> theTypePostFetchFilterUrl, + List> thePatientIds) { validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT); - validateTargetsExists(theRequestDetails, "Patient", List.of(theIdParam)); + validateTargetsExists( + theRequestDetails, + "Patient", + thePatientIds.stream().map(c -> new IdType(c.getValue())).collect(Collectors.toList())); BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions( theOutputFormat, @@ -427,7 +446,7 @@ public class BulkDataExportProvider { theSince, theTypeFilter, theExportIdentifier, - theIdParam, + thePatientIds, theTypePostFetchFilterUrl); validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes()); @@ -670,31 +689,6 @@ public class BulkDataExportProvider { return BulkExportJobParameters; } - private BulkExportJobParameters buildPatientBulkExportOptions( - IPrimitiveType theOutputFormat, - IPrimitiveType theType, - IPrimitiveType theSince, - List> theTypeFilter, - IPrimitiveType theExportIdentifier, - IIdType thePatientId, - List> theTypePostFetchFilterUrl) { - IPrimitiveType type = theType; - if (type == null) { - // set type to all patient compartment resources if it is null - type = new StringDt(String.join(",", getPatientCompartmentResources())); - } - BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters( - theOutputFormat, - type, - theSince, - theTypeFilter, - theExportIdentifier, - ExportStyle.PATIENT, - theTypePostFetchFilterUrl); - BulkExportJobParameters.setPatientIds(Collections.singleton(thePatientId.getValue())); - return BulkExportJobParameters; - } - private BulkExportJobParameters buildBulkExportJobParameters( IPrimitiveType theOutputFormat, IPrimitiveType theType, From 5d55594a739f229f5cb9418b339e671394338c70 Mon Sep 17 00:00:00 2001 From: TipzCM Date: Fri, 5 Apr 2024 21:39:03 
-0400 Subject: [PATCH 04/26] Batch2 test refactor only (#5823) * refactoring * test refactor only * spotless * bumping version --------- Co-authored-by: leif stawnyczy --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-checkstyle/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 2 +- hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jpa/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-jpaserver-hfql/pom.xml | 2 +- hapi-fhir-jpaserver-ips/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4/pom.xml | 2 +- .../jpa/batch2/JpaJobPersistenceImplTest.java | 12 +- hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +- hapi-fhir-jpaserver-test-r5/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-cds-hooks/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../hapi-fhir-caching-api/pom.xml | 2 +- .../hapi-fhir-caching-caffeine/pom.xml | 4 +- .../hapi-fhir-caching-guava/pom.xml | 2 +- .../hapi-fhir-caching-testing/pom.xml | 2 +- hapi-fhir-serviceloaders/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- 
hapi-fhir-sql-migrate/pom.xml | 2 +- hapi-fhir-storage-batch2-jobs/pom.xml | 2 +- .../pom.xml | 2 +- ...tractIJobPersistenceSpecificationTest.java | 683 ++++-------------- .../batch2/test/IInProgressActionsTests.java | 55 ++ .../test/IInstanceStateTransitions.java | 82 +++ .../hapi/fhir/batch2/test/ITestFixture.java | 65 ++ .../fhir/batch2/test/IWorkChunkCommon.java | 23 + .../test/IWorkChunkErrorActionsTests.java | 108 +++ .../test/IWorkChunkStateTransitions.java | 32 + .../batch2/test/IWorkChunkStorageTests.java | 23 + .../batch2/test/WorkChunkTestConstants.java | 18 + .../test/configs/SpyOverrideConfig.java | 27 + hapi-fhir-storage-batch2/pom.xml | 2 +- .../batch2/api/IJobMaintenanceService.java | 7 + .../JobMaintenanceServiceImpl.java | 11 + hapi-fhir-storage-cr/pom.xml | 2 +- hapi-fhir-storage-mdm/pom.xml | 2 +- hapi-fhir-storage-test-utilities/pom.xml | 2 +- hapi-fhir-storage/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r4b/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r4b/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 2 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 90 files changed, 667 insertions(+), 637 deletions(-) create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java create mode 100644 
hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java create mode 100644 hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 520335fca6b..5e8b7d27de7 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index a0e76657589..c12e6e57b77 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 5fb7d9b771b..fb35d780562 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 37459003bbd..4da0f9c1d40 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml 
@@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index 659f30ce0fc..08e0b9c2eaf 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index d5a85e8db5e..eed137d374d 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 31efb5e1cdc..de8fa790b0b 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index bae9d20464c..4fc7c8e427e 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 0dce45e5b92..ce20d1ab6b2 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 0c8fd1a7c43..638f1ed7be0 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff 
--git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 69c806cf9fb..fca0987ddf5 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 5463e9b5d5a..d90951583dd 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index b253184ccbf..52d01b8dfbf 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index a5d73a949a7..e7c060a760f 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index c992c31b9bb..e00c808a8c9 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index dc0e37591be..a530d5d127d 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 9e57f2dedae..3e92c57b452 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index e969a0be649..17c3a08aa50 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 6de7d563dd7..99bed5fc6d0 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index dbb537202f7..eb66d27cfac 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 6441a09562d..2a8ed1e5e9b 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index b44506f53ab..23b8a5f67c5 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 751edb8b63f..1cbb7748009 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 4e05e2edf03..cc08fd09695 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index d51a8940bc0..1ced5f66cf1 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 7fa9c4fbb06..7868b2b95a2 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index 8bec81531a4..9a9709161a5 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java index c2cdca398fb..90654ff9bc1 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java @@ -20,6 +20,7 @@ import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.util.JsonUtil; import 
ca.uhn.hapi.fhir.batch2.test.AbstractIJobPersistenceSpecificationTest; +import ca.uhn.hapi.fhir.batch2.test.configs.SpyOverrideConfig; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import org.junit.jupiter.api.MethodOrderer; @@ -30,6 +31,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Import; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; @@ -60,6 +62,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; @TestMethodOrder(MethodOrderer.MethodName.class) +@Import(SpyOverrideConfig.class) public class JpaJobPersistenceImplTest extends BaseJpaR4Test { public static final String JOB_DEFINITION_ID = "definition-id"; @@ -323,14 +326,19 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test { class Batch2SpecTest extends AbstractIJobPersistenceSpecificationTest { @Override - protected PlatformTransactionManager getTxManager() { + public PlatformTransactionManager getTxManager() { return JpaJobPersistenceImplTest.this.getTxManager(); } @Override - protected WorkChunk freshFetchWorkChunk(String chunkId) { + public WorkChunk freshFetchWorkChunk(String chunkId) { return JpaJobPersistenceImplTest.this.freshFetchWorkChunk(chunkId); } + + @Override + public void runMaintenancePass() { + myBatch2JobHelper.forceRunMaintenancePass(); + } } @Test diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index 77be64734fc..9013e0c6b72 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index 545c9c2fbb4..f91d4266b1a 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index d92fba3b123..72fcdec2607 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index a38b2b8c988..434fa5b7fd1 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 4749239fb7f..4e2bbda8f5e 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 366ceb47463..5d85b54408b 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index cfaae68b9d9..6371abe0514 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 3d74ccc0870..03bba016e4a 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index e51308bd2a7..41d0b9e5491 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 32680b37acb..47a646b0196 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index da34ff1770e..1bb5308eec9 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index 3e09e7c0160..615bcaede67 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml 
b/hapi-fhir-serviceloaders/pom.xml index 3a7bdba7d54..4e9f671c79c 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 93b126309c1..5514a27054c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 7a250b4b4c5..638c3933040 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index a99bea8806d..d3e81bb97d3 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT diff --git 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index e3b0cf47a83..6ec3497c477 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index f45a5e76eba..133fa52cfd8 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index d554053161d..e23fe9696d2 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 8b56e169941..25eee371362 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index 0827041f3d5..e10a99c7db0 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index bb38f3ccee7..814d926048b 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index 69ed1d4dcff..1762a3dbe65 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/AbstractIJobPersistenceSpecificationTest.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/AbstractIJobPersistenceSpecificationTest.java index ce3ea14e2af..230944b128a 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/AbstractIJobPersistenceSpecificationTest.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/AbstractIJobPersistenceSpecificationTest.java @@ -20,30 +20,28 @@ package ca.uhn.hapi.fhir.batch2.test; +import ca.uhn.fhir.batch2.api.IJobMaintenanceService; import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.channel.BatchJobSender; import ca.uhn.fhir.batch2.coordinator.JobDefinitionRegistry; -import ca.uhn.fhir.batch2.maintenance.JobChunkProgressAccumulator; -import ca.uhn.fhir.batch2.maintenance.JobInstanceProcessor; import ca.uhn.fhir.batch2.model.JobDefinition; import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.JobWorkNotification; import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.WorkChunk; -import 
ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent; import ca.uhn.fhir.batch2.model.WorkChunkCreateEvent; -import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent; -import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.StopWatch; import ca.uhn.hapi.fhir.batch2.test.support.TestJobParameters; import ca.uhn.hapi.fhir.batch2.test.support.TestJobStep2InputType; import ca.uhn.hapi.fhir.batch2.test.support.TestJobStep3InputType; -import com.google.common.collect.ImmutableList; -import org.junit.jupiter.api.BeforeEach; +import ca.uhn.test.concurrency.PointcutLatch; +import jakarta.annotation.Nonnull; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.EnumSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -53,580 +51,118 @@ import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; -import jakarta.annotation.Nonnull; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; import java.util.concurrent.Callable; import static org.awaitility.Awaitility.await; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.emptyString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.theInstance; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static 
org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; /** * Specification tests for batch2 storage and event system. * These tests are abstract, and do not depend on JPA. * Test setups should use the public batch2 api to create scenarios. */ -public abstract class AbstractIJobPersistenceSpecificationTest { - private static final Logger ourLog = LoggerFactory.getLogger(AbstractIJobPersistenceSpecificationTest.class); +public abstract class AbstractIJobPersistenceSpecificationTest implements IInProgressActionsTests, IInstanceStateTransitions, ITestFixture, WorkChunkTestConstants { - public static final String JOB_DEFINITION_ID = "definition-id"; - public static final String TARGET_STEP_ID = "step-id"; - public static final String DEF_CHUNK_ID = "definition-chunkId"; - public static final String STEP_CHUNK_ID = "step-chunkId"; - public static final int JOB_DEF_VER = 1; - public static final int SEQUENCE_NUMBER = 1; - public static final String CHUNK_DATA = "{\"key\":\"value\"}"; - public static final String ERROR_MESSAGE_A = "This is an error message: A"; - public static final String ERROR_MESSAGE_B = "This is a different error message: B"; - public static final String ERROR_MESSAGE_C = "This is a different error message: C"; + private static final Logger ourLog = LoggerFactory.getLogger(AbstractIJobPersistenceSpecificationTest.class); @Autowired private IJobPersistence mySvc; - @Nested - class WorkChunkStorage { + @Autowired + private JobDefinitionRegistry myJobDefinitionRegistry; - @Test - public void testStoreAndFetchWorkChunk_NoData() { - JobInstance instance = createInstance(); - String instanceId = 
mySvc.storeNewInstance(instance); + @Autowired + private PlatformTransactionManager myTransactionManager; - String id = storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, instanceId, 0, null); + @Autowired + private IJobMaintenanceService myMaintenanceService; - WorkChunk chunk = mySvc.onWorkChunkDequeue(id).orElseThrow(IllegalArgumentException::new); - assertNull(chunk.getData()); - } + @Autowired + private BatchJobSender myBatchJobSender; - @Test - public void testStoreAndFetchWorkChunk_WithData() { - JobInstance instance = createInstance(); - String instanceId = mySvc.storeNewInstance(instance); - - String id = storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, instanceId, 0, CHUNK_DATA); - assertNotNull(id); - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.QUEUED, freshFetchWorkChunk(id).getStatus())); - - WorkChunk chunk = mySvc.onWorkChunkDequeue(id).orElseThrow(IllegalArgumentException::new); - assertEquals(36, chunk.getInstanceId().length()); - assertEquals(JOB_DEFINITION_ID, chunk.getJobDefinitionId()); - assertEquals(JOB_DEF_VER, chunk.getJobDefinitionVersion()); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - assertEquals(CHUNK_DATA, chunk.getData()); - - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.IN_PROGRESS, freshFetchWorkChunk(id).getStatus())); - } - - /** - * Should match the diagram in batch2_states.md - * @see hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/batch2_states.md - */ - @Nested - class StateTransitions { - - private String myInstanceId; - private String myChunkId; - - @BeforeEach - void setUp() { - JobInstance jobInstance = createInstance(); - myInstanceId = mySvc.storeNewInstance(jobInstance); - - } - - private String createChunk() { - return storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, myInstanceId, 0, CHUNK_DATA); - } - - @Test - public void chunkCreation_isQueued() { - - myChunkId = createChunk(); - - WorkChunk fetchedWorkChunk = freshFetchWorkChunk(myChunkId); 
- assertEquals(WorkChunkStatusEnum.QUEUED, fetchedWorkChunk.getStatus(), "New chunks are QUEUED"); - } - - @Test - public void chunkReceived_queuedToInProgress() { - - myChunkId = createChunk(); - - // the worker has received the chunk, and marks it started. - WorkChunk chunk = mySvc.onWorkChunkDequeue(myChunkId).orElseThrow(IllegalArgumentException::new); - - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - assertEquals(CHUNK_DATA, chunk.getData()); - - // verify the db was updated too - WorkChunk fetchedWorkChunk = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, fetchedWorkChunk.getStatus()); - } - - @Nested - class InProgressActions { - @BeforeEach - void setUp() { - // setup - the worker has received the chunk, and has marked it IN_PROGRESS. - myChunkId = createChunk(); - mySvc.onWorkChunkDequeue(myChunkId); - } - - @Test - public void processingOk_inProgressToSuccess_clearsDataSavesRecordCount() { - - // execution ok - mySvc.onWorkChunkCompletion(new WorkChunkCompletionEvent(myChunkId, 3, 0)); - - // verify the db was updated - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.COMPLETED, workChunkEntity.getStatus()); - assertNull(workChunkEntity.getData()); - assertEquals(3, workChunkEntity.getRecordsProcessed()); - assertNull(workChunkEntity.getErrorMessage()); - assertEquals(0, workChunkEntity.getErrorCount()); - } - - @Test - public void processingRetryableError_inProgressToError_bumpsCountRecordsMessage() { - - // execution had a retryable error - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_A)); - - // verify the db was updated - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, workChunkEntity.getStatus()); - assertEquals(ERROR_MESSAGE_A, workChunkEntity.getErrorMessage()); - assertEquals(1, workChunkEntity.getErrorCount()); - } - - @Test - public void processingFailure_inProgressToFailed() { 
- - // execution had a failure - mySvc.onWorkChunkFailed(myChunkId, "some error"); - - // verify the db was updated - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.FAILED, workChunkEntity.getStatus()); - assertEquals("some error", workChunkEntity.getErrorMessage()); - } - } - - @Nested - class ErrorActions { - public static final String FIRST_ERROR_MESSAGE = ERROR_MESSAGE_A; - @BeforeEach - void setUp() { - // setup - the worker has received the chunk, and has marked it IN_PROGRESS. - myChunkId = createChunk(); - mySvc.onWorkChunkDequeue(myChunkId); - // execution had a retryable error - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, FIRST_ERROR_MESSAGE)); - } - - /** - * The consumer will retry after a retryable error is thrown - */ - @Test - void errorRetry_errorToInProgress() { - - // when consumer restarts chunk - WorkChunk chunk = mySvc.onWorkChunkDequeue(myChunkId).orElseThrow(IllegalArgumentException::new); - - // then - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - - // verify the db state, error message, and error count - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, workChunkEntity.getStatus()); - assertEquals(FIRST_ERROR_MESSAGE, workChunkEntity.getErrorMessage(), "Original error message kept"); - assertEquals(1, workChunkEntity.getErrorCount(), "error count kept"); - } - - @Test - void errorRetry_repeatError_increasesErrorCount() { - // setup - the consumer is re-trying, and marks it IN_PROGRESS - mySvc.onWorkChunkDequeue(myChunkId); - - - // when another error happens - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); - - - // verify the state, new message, and error count - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, workChunkEntity.getStatus()); - assertEquals(ERROR_MESSAGE_B, workChunkEntity.getErrorMessage(), "new error message"); - 
assertEquals(2, workChunkEntity.getErrorCount(), "error count inc"); - } - - @Test - void errorThenRetryAndComplete_addsErrorCounts() { - // setup - the consumer is re-trying, and marks it IN_PROGRESS - mySvc.onWorkChunkDequeue(myChunkId); - - // then it completes ok. - mySvc.onWorkChunkCompletion(new WorkChunkCompletionEvent(myChunkId, 3, 1)); - - // verify the state, new message, and error count - var workChunkEntity = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.COMPLETED, workChunkEntity.getStatus()); - assertEquals(FIRST_ERROR_MESSAGE, workChunkEntity.getErrorMessage(), "Error message kept."); - assertEquals(2, workChunkEntity.getErrorCount(), "error combined with earlier error"); - } - - @Test - void errorRetry_maxErrors_movesToFailed() { - // we start with 1 error already - - // 2nd try - mySvc.onWorkChunkDequeue(myChunkId); - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); - var chunk = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, chunk.getStatus()); - assertEquals(2, chunk.getErrorCount()); - - // 3rd try - mySvc.onWorkChunkDequeue(myChunkId); - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); - chunk = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, chunk.getStatus()); - assertEquals(3, chunk.getErrorCount()); - - // 4th try - mySvc.onWorkChunkDequeue(myChunkId); - mySvc.onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_C)); - chunk = freshFetchWorkChunk(myChunkId); - assertEquals(WorkChunkStatusEnum.FAILED, chunk.getStatus()); - assertEquals(4, chunk.getErrorCount()); - assertThat("Error message contains last error", chunk.getErrorMessage(), containsString(ERROR_MESSAGE_C)); - assertThat("Error message contains error count and complaint", chunk.getErrorMessage(), containsString("many errors: 4")); - } - } - } - - @Test - public void testMarkChunkAsCompleted_Success() { - JobInstance instance = 
createInstance(); - String instanceId = mySvc.storeNewInstance(instance); - String chunkId = storeWorkChunk(DEF_CHUNK_ID, STEP_CHUNK_ID, instanceId, SEQUENCE_NUMBER, CHUNK_DATA); - assertNotNull(chunkId); - - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.QUEUED, freshFetchWorkChunk(chunkId).getStatus())); - - sleepUntilTimeChanges(); - - WorkChunk chunk = mySvc.onWorkChunkDequeue(chunkId).orElseThrow(IllegalArgumentException::new); - assertEquals(SEQUENCE_NUMBER, chunk.getSequence()); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - assertNotNull(chunk.getCreateTime()); - assertNotNull(chunk.getStartTime()); - assertNull(chunk.getEndTime()); - assertNull(chunk.getRecordsProcessed()); - assertNotNull(chunk.getData()); - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.IN_PROGRESS, freshFetchWorkChunk(chunkId).getStatus())); - - sleepUntilTimeChanges(); - - runInTransaction(() -> mySvc.onWorkChunkCompletion(new WorkChunkCompletionEvent(chunkId, 50, 0))); - - WorkChunk entity = freshFetchWorkChunk(chunkId); - assertEquals(WorkChunkStatusEnum.COMPLETED, entity.getStatus()); - assertEquals(50, entity.getRecordsProcessed()); - assertNotNull(entity.getCreateTime()); - assertNotNull(entity.getStartTime()); - assertNotNull(entity.getEndTime()); - assertNull(entity.getData()); - assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime()); - assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime()); - } - - - @Test - public void testMarkChunkAsCompleted_Error() { - JobInstance instance = createInstance(); - String instanceId = mySvc.storeNewInstance(instance); - String chunkId = storeWorkChunk(DEF_CHUNK_ID, STEP_CHUNK_ID, instanceId, SEQUENCE_NUMBER, null); - assertNotNull(chunkId); - - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.QUEUED, freshFetchWorkChunk(chunkId).getStatus())); - - sleepUntilTimeChanges(); - - WorkChunk chunk = 
mySvc.onWorkChunkDequeue(chunkId).orElseThrow(IllegalArgumentException::new); - assertEquals(SEQUENCE_NUMBER, chunk.getSequence()); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - - sleepUntilTimeChanges(); - - WorkChunkErrorEvent request = new WorkChunkErrorEvent(chunkId, ERROR_MESSAGE_A); - mySvc.onWorkChunkError(request); - runInTransaction(() -> { - WorkChunk entity = freshFetchWorkChunk(chunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, entity.getStatus()); - assertEquals(ERROR_MESSAGE_A, entity.getErrorMessage()); - assertNotNull(entity.getCreateTime()); - assertNotNull(entity.getStartTime()); - assertNotNull(entity.getEndTime()); - assertEquals(1, entity.getErrorCount()); - assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime()); - assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime()); - }); - - // Mark errored again - - WorkChunkErrorEvent request2 = new WorkChunkErrorEvent(chunkId, "This is an error message 2"); - mySvc.onWorkChunkError(request2); - runInTransaction(() -> { - WorkChunk entity = freshFetchWorkChunk(chunkId); - assertEquals(WorkChunkStatusEnum.ERRORED, entity.getStatus()); - assertEquals("This is an error message 2", entity.getErrorMessage()); - assertNotNull(entity.getCreateTime()); - assertNotNull(entity.getStartTime()); - assertNotNull(entity.getEndTime()); - assertEquals(2, entity.getErrorCount()); - assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime()); - assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime()); - }); - - List chunks = ImmutableList.copyOf(mySvc.fetchAllWorkChunksIterator(instanceId, true)); - assertEquals(1, chunks.size()); - assertEquals(2, chunks.get(0).getErrorCount()); - } - - @Test - public void testMarkChunkAsCompleted_Fail() { - JobInstance instance = createInstance(); - String instanceId = mySvc.storeNewInstance(instance); - String chunkId = storeWorkChunk(DEF_CHUNK_ID, STEP_CHUNK_ID, instanceId, 
SEQUENCE_NUMBER, null); - assertNotNull(chunkId); - - runInTransaction(() -> assertEquals(WorkChunkStatusEnum.QUEUED, freshFetchWorkChunk(chunkId).getStatus())); - - sleepUntilTimeChanges(); - - WorkChunk chunk = mySvc.onWorkChunkDequeue(chunkId).orElseThrow(IllegalArgumentException::new); - assertEquals(SEQUENCE_NUMBER, chunk.getSequence()); - assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); - - sleepUntilTimeChanges(); - - mySvc.onWorkChunkFailed(chunkId, "This is an error message"); - runInTransaction(() -> { - WorkChunk entity = freshFetchWorkChunk(chunkId); - assertEquals(WorkChunkStatusEnum.FAILED, entity.getStatus()); - assertEquals("This is an error message", entity.getErrorMessage()); - assertNotNull(entity.getCreateTime()); - assertNotNull(entity.getStartTime()); - assertNotNull(entity.getEndTime()); - assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime()); - assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime()); - }); - } - - @Test - public void markWorkChunksWithStatusAndWipeData_marksMultipleChunksWithStatus_asExpected() { - JobInstance instance = createInstance(); - String instanceId = mySvc.storeNewInstance(instance); - ArrayList chunkIds = new ArrayList<>(); - for (int i = 0; i < 10; i++) { - WorkChunkCreateEvent chunk = new WorkChunkCreateEvent( - "defId", - 1, - "stepId", - instanceId, - 0, - "{}" - ); - String id = mySvc.onWorkChunkCreate(chunk); - chunkIds.add(id); - } - - runInTransaction(() -> mySvc.markWorkChunksWithStatusAndWipeData(instance.getInstanceId(), chunkIds, WorkChunkStatusEnum.COMPLETED, null)); - - Iterator reducedChunks = mySvc.fetchAllWorkChunksIterator(instanceId, true); - - while (reducedChunks.hasNext()) { - WorkChunk reducedChunk = reducedChunks.next(); - assertTrue(chunkIds.contains(reducedChunk.getId())); - assertEquals(WorkChunkStatusEnum.COMPLETED, reducedChunk.getStatus()); - } - } + public PlatformTransactionManager getTransactionManager() { + return 
myTransactionManager; } - /** - * Test - * * @see hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/batch2_states.md - */ - @Nested - class InstanceStateTransitions { - - @Test - void createInstance_createsInQueuedWithChunk() { - // given - JobDefinition jd = withJobDefinition(); - - // when - IJobPersistence.CreateResult createResult = - newTxTemplate().execute(status-> - mySvc.onCreateWithFirstChunk(jd, "{}")); - - // then - ourLog.info("job and chunk created {}", createResult); - assertNotNull(createResult); - assertThat(createResult.jobInstanceId, not(emptyString())); - assertThat(createResult.workChunkId, not(emptyString())); - - JobInstance jobInstance = freshFetchJobInstance(createResult.jobInstanceId); - assertThat(jobInstance.getStatus(), equalTo(StatusEnum.QUEUED)); - assertThat(jobInstance.getParameters(), equalTo("{}")); - - WorkChunk firstChunk = freshFetchWorkChunk(createResult.workChunkId); - assertThat(firstChunk.getStatus(), equalTo(WorkChunkStatusEnum.QUEUED)); - assertNull(firstChunk.getData(), "First chunk data is null - only uses parameters"); - } - - @Test - void testCreateInstance_firstChunkDequeued_movesToInProgress() { - // given - JobDefinition jd = withJobDefinition(); - IJobPersistence.CreateResult createResult = newTxTemplate().execute(status-> - mySvc.onCreateWithFirstChunk(jd, "{}")); - assertNotNull(createResult); - - // when - newTxTemplate().execute(status -> mySvc.onChunkDequeued(createResult.jobInstanceId)); - - // then - JobInstance jobInstance = freshFetchJobInstance(createResult.jobInstanceId); - assertThat(jobInstance.getStatus(), equalTo(StatusEnum.IN_PROGRESS)); - } - - - - @ParameterizedTest - @EnumSource(StatusEnum.class) - void cancelRequest_cancelsJob_whenNotFinalState(StatusEnum theState) { - // given - JobInstance cancelledInstance = createInstance(); - cancelledInstance.setStatus(theState); - String instanceId1 = mySvc.storeNewInstance(cancelledInstance); - mySvc.cancelInstance(instanceId1); - 
- JobInstance normalInstance = createInstance(); - normalInstance.setStatus(theState); - String instanceId2 = mySvc.storeNewInstance(normalInstance); - - JobDefinitionRegistry jobDefinitionRegistry = new JobDefinitionRegistry(); - jobDefinitionRegistry.addJobDefinitionIfNotRegistered(withJobDefinition()); - - - // when - runInTransaction(()-> new JobInstanceProcessor(mySvc, null, instanceId1, new JobChunkProgressAccumulator(), null, jobDefinitionRegistry) - .process()); - - - // then - JobInstance freshInstance1 = mySvc.fetchInstance(instanceId1).orElseThrow(); - if (theState.isCancellable()) { - assertEquals(StatusEnum.CANCELLED, freshInstance1.getStatus(), "cancel request processed"); - assertThat(freshInstance1.getErrorMessage(), containsString("Job instance cancelled")); - } else { - assertEquals(theState, freshInstance1.getStatus(), "cancel request ignored - state unchanged"); - assertNull(freshInstance1.getErrorMessage(), "no error message"); - } - JobInstance freshInstance2 = mySvc.fetchInstance(instanceId2).orElseThrow(); - assertEquals(theState, freshInstance2.getStatus(), "cancel request ignored - cancelled not set"); - } + public IJobPersistence getSvc() { + return mySvc; } - @Test - void testDeleteChunksAndMarkInstanceAsChunksPurged_doesWhatItSays() { - // given - JobDefinition jd = withJobDefinition(); - IJobPersistence.CreateResult createResult = newTxTemplate().execute(status-> - mySvc.onCreateWithFirstChunk(jd, "{}")); - String instanceId = createResult.jobInstanceId; - for (int i = 0; i < 10; i++) { - storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, instanceId, i, CHUNK_DATA); - } - JobInstance readback = freshFetchJobInstance(instanceId); - assertFalse(readback.isWorkChunksPurged()); - assertTrue(mySvc.fetchAllWorkChunksIterator(instanceId, true).hasNext(), "has chunk"); - - // when - mySvc.deleteChunksAndMarkInstanceAsChunksPurged(instanceId); - - // then - readback = freshFetchJobInstance(instanceId); - assertTrue(readback.isWorkChunksPurged(), 
"purged set"); - assertFalse(mySvc.fetchAllWorkChunksIterator(instanceId, true).hasNext(), "chunks gone"); - } - - @Test - void testInstanceUpdate_modifierApplied() { - // given - String instanceId = mySvc.storeNewInstance(createInstance()); - - // when - mySvc.updateInstance(instanceId, instance ->{ - instance.setErrorCount(42); - return true; - }); - - // then - JobInstance jobInstance = freshFetchJobInstance(instanceId); - assertEquals(42, jobInstance.getErrorCount()); - } - - @Test - void testInstanceUpdate_modifierNotAppliedWhenPredicateReturnsFalse() { - // given - JobInstance instance1 = createInstance(); - boolean initialValue = true; - instance1.setFastTracking(initialValue); - String instanceId = mySvc.storeNewInstance(instance1); - - // when - mySvc.updateInstance(instanceId, instance ->{ - instance.setFastTracking(false); - return false; - }); - - // then - JobInstance jobInstance = freshFetchJobInstance(instanceId); - assertEquals(initialValue, jobInstance.isFastTracking()); - } - - private JobDefinition withJobDefinition() { - return JobDefinition.newBuilder() - .setJobDefinitionId(JOB_DEFINITION_ID) + public JobDefinition withJobDefinition(boolean theIsGatedBoolean) { + JobDefinition.Builder builder = JobDefinition.newBuilder() + .setJobDefinitionId(theIsGatedBoolean ? 
GATED_JOB_DEFINITION_ID : JOB_DEFINITION_ID) .setJobDefinitionVersion(JOB_DEF_VER) .setJobDescription("A job description") .setParametersType(TestJobParameters.class) .addFirstStep(TARGET_STEP_ID, "the first step", TestJobStep2InputType.class, (theStepExecutionDetails, theDataSink) -> new RunOutcome(0)) .addIntermediateStep("2nd-step-id", "the second step", TestJobStep3InputType.class, (theStepExecutionDetails, theDataSink) -> new RunOutcome(0)) - .addLastStep("last-step-id", "the final step", (theStepExecutionDetails, theDataSink) -> new RunOutcome(0)) - .build(); + .addLastStep("last-step-id", "the final step", (theStepExecutionDetails, theDataSink) -> new RunOutcome(0)); + if (theIsGatedBoolean) { + builder.gatedExecution(); + } + return builder.build(); } + @AfterEach + public void after() { + myJobDefinitionRegistry.removeJobDefinition(JOB_DEFINITION_ID, JOB_DEF_VER); + + // clear invocations on the batch sender from previous jobs that might be + // kicking around + Mockito.clearInvocations(myBatchJobSender); + } + + @Override + public ITestFixture getTestManager() { + return this; + } + + @Override + public void enableMaintenanceRunner(boolean theToEnable) { + myMaintenanceService.enableMaintenancePass(theToEnable); + } + + @Nested + class WorkChunkStorage implements IWorkChunkStorageTests { + + @Override + public ITestFixture getTestManager() { + return AbstractIJobPersistenceSpecificationTest.this; + } + + @Nested + class StateTransitions implements IWorkChunkStateTransitions { + + @Override + public ITestFixture getTestManager() { + return AbstractIJobPersistenceSpecificationTest.this; + } + + @Nested + class ErrorActions implements IWorkChunkErrorActionsTests { + + @Override + public ITestFixture getTestManager() { + return AbstractIJobPersistenceSpecificationTest.this; + } + } + } + } @Nonnull - private JobInstance createInstance() { + public JobInstance createInstance(JobDefinition theJobDefinition) { + JobDefinition jobDefinition = theJobDefinition == 
null ? withJobDefinition(false) + : theJobDefinition; + if (myJobDefinitionRegistry.getJobDefinition(jobDefinition.getJobDefinitionId(), jobDefinition.getJobDefinitionVersion()).isEmpty()) { + myJobDefinitionRegistry.addJobDefinition(jobDefinition); + } + JobInstance instance = new JobInstance(); - instance.setJobDefinitionId(JOB_DEFINITION_ID); + instance.setJobDefinitionId(jobDefinition.getJobDefinitionId()); + instance.setJobDefinitionVersion(jobDefinition.getJobDefinitionVersion()); instance.setStatus(StatusEnum.QUEUED); instance.setJobDefinitionVersion(JOB_DEF_VER); instance.setParameters(CHUNK_DATA); @@ -634,18 +170,20 @@ public abstract class AbstractIJobPersistenceSpecificationTest { return instance; } - private String storeWorkChunk(String theJobDefinitionId, String theTargetStepId, String theInstanceId, int theSequence, String theSerializedData) { + public String storeWorkChunk(String theJobDefinitionId, String theTargetStepId, String theInstanceId, int theSequence, String theSerializedData) { WorkChunkCreateEvent batchWorkChunk = new WorkChunkCreateEvent(theJobDefinitionId, JOB_DEF_VER, theTargetStepId, theInstanceId, theSequence, theSerializedData); return mySvc.onWorkChunkCreate(batchWorkChunk); } + public abstract PlatformTransactionManager getTxManager(); - protected abstract PlatformTransactionManager getTxManager(); - protected abstract WorkChunk freshFetchWorkChunk(String theChunkId); - protected JobInstance freshFetchJobInstance(String theInstanceId) { + public JobInstance freshFetchJobInstance(String theInstanceId) { return runInTransaction(() -> mySvc.fetchInstance(theInstanceId).orElseThrow()); } + @Override + public abstract void runMaintenancePass(); + public TransactionTemplate newTxTemplate() { TransactionTemplate retVal = new TransactionTemplate(getTxManager()); retVal.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); @@ -681,6 +219,39 @@ public abstract class AbstractIJobPersistenceSpecificationTest { 
await().until(() -> sw.getMillis() > 0); } + public String createAndStoreJobInstance(JobDefinition theJobDefinition) { + JobInstance jobInstance = createInstance(theJobDefinition); + return mySvc.storeNewInstance(jobInstance); + } + public String createAndDequeueWorkChunk(String theJobInstanceId) { + String chunkId = createChunk(theJobInstanceId); + mySvc.onWorkChunkDequeue(chunkId); + return chunkId; + } + @Override + public abstract WorkChunk freshFetchWorkChunk(String theChunkId); + + public String createChunk(String theInstanceId) { + return storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, theInstanceId, 0, CHUNK_DATA); + } + + public PointcutLatch disableWorkChunkMessageHandler() { + PointcutLatch latch = new PointcutLatch(new Exception().getStackTrace()[0].getMethodName()); + + doAnswer(a -> { + latch.call(1); + return Void.class; + }).when(myBatchJobSender).sendWorkChannelMessage(any(JobWorkNotification.class)); + return latch; + } + + public void verifyWorkChunkMessageHandlerCalled(PointcutLatch theSendingLatch, int theNumberOfTimes) throws InterruptedException { + theSendingLatch.awaitExpected(); + ArgumentCaptor notificationCaptor = ArgumentCaptor.forClass(JobWorkNotification.class); + + verify(myBatchJobSender, times(theNumberOfTimes)) + .sendWorkChannelMessage(notificationCaptor.capture()); + } } diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java new file mode 100644 index 00000000000..eda69e384ac --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java @@ -0,0 +1,55 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent; +import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent; +import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; +import 
org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +public interface IInProgressActionsTests extends IWorkChunkCommon, WorkChunkTestConstants { + + @Test + default void processingOk_inProgressToSuccess_clearsDataSavesRecordCount() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + // execution ok + getTestManager().getSvc().onWorkChunkCompletion(new WorkChunkCompletionEvent(myChunkId, 3, 0)); + + // verify the db was updated + var workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.COMPLETED, workChunkEntity.getStatus()); + assertNull(workChunkEntity.getData()); + assertEquals(3, workChunkEntity.getRecordsProcessed()); + assertNull(workChunkEntity.getErrorMessage()); + assertEquals(0, workChunkEntity.getErrorCount()); + } + + @Test + default void processingRetryableError_inProgressToError_bumpsCountRecordsMessage() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + // execution had a retryable error + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_A)); + + // verify the db was updated + var workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.ERRORED, workChunkEntity.getStatus()); + assertEquals(ERROR_MESSAGE_A, workChunkEntity.getErrorMessage()); + assertEquals(1, workChunkEntity.getErrorCount()); + } + + @Test + default void processingFailure_inProgressToFailed() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + // execution had a failure + getTestManager().getSvc().onWorkChunkFailed(myChunkId, "some error"); + + // verify the db was updated + var 
workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.FAILED, workChunkEntity.getStatus()); + assertEquals("some error", workChunkEntity.getErrorMessage()); + } +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java new file mode 100644 index 00000000000..57909eaecd1 --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java @@ -0,0 +1,82 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.api.IJobPersistence; +import ca.uhn.fhir.batch2.coordinator.JobDefinitionRegistry; +import ca.uhn.fhir.batch2.maintenance.JobChunkProgressAccumulator; +import ca.uhn.fhir.batch2.maintenance.JobInstanceProcessor; +import ca.uhn.fhir.batch2.model.JobDefinition; +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.StatusEnum; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +public interface IInstanceStateTransitions extends IWorkChunkCommon, WorkChunkTestConstants { + Logger ourLog = LoggerFactory.getLogger(IInstanceStateTransitions.class); + + @Test + default void testCreateInstance_firstChunkDequeued_movesToInProgress() { + // given + JobDefinition jd = getTestManager().withJobDefinition(false); + IJobPersistence.CreateResult createResult = 
getTestManager().newTxTemplate().execute(status-> + getTestManager().getSvc().onCreateWithFirstChunk(jd, "{}")); + assertNotNull(createResult); + + // when + getTestManager().newTxTemplate().execute(status -> getTestManager().getSvc().onChunkDequeued(createResult.jobInstanceId)); + + // then + JobInstance jobInstance = getTestManager().freshFetchJobInstance(createResult.jobInstanceId); + assertThat(jobInstance.getStatus(), equalTo(StatusEnum.IN_PROGRESS)); + } + + @ParameterizedTest + @EnumSource(StatusEnum.class) + default void cancelRequest_cancelsJob_whenNotFinalState(StatusEnum theState) { + // given + JobInstance cancelledInstance = createInstance(); + cancelledInstance.setStatus(theState); + String instanceId1 = getTestManager().getSvc().storeNewInstance(cancelledInstance); + getTestManager().getSvc().cancelInstance(instanceId1); + + JobInstance normalInstance = createInstance(); + normalInstance.setStatus(theState); + String instanceId2 = getTestManager().getSvc().storeNewInstance(normalInstance); + + JobDefinitionRegistry jobDefinitionRegistry = new JobDefinitionRegistry(); + jobDefinitionRegistry.addJobDefinitionIfNotRegistered(getTestManager().withJobDefinition(false)); + + // when + getTestManager().runInTransaction(()-> { + new JobInstanceProcessor( + getTestManager().getSvc(), + null, + instanceId1, + new JobChunkProgressAccumulator(), + null, + jobDefinitionRegistry + ).process(); + }); + + // then + JobInstance freshInstance1 = getTestManager().getSvc().fetchInstance(instanceId1).orElseThrow(); + if (theState.isCancellable()) { + assertEquals(StatusEnum.CANCELLED, freshInstance1.getStatus(), "cancel request processed"); + assertThat(freshInstance1.getErrorMessage(), containsString("Job instance cancelled")); + } else { + assertEquals(theState, freshInstance1.getStatus(), "cancel request ignored - state unchanged"); + assertNull(freshInstance1.getErrorMessage(), "no error message"); + } + JobInstance freshInstance2 = 
getTestManager().getSvc().fetchInstance(instanceId2).orElseThrow(); + assertEquals(theState, freshInstance2.getStatus(), "cancel request ignored - cancelled not set"); + } +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java new file mode 100644 index 00000000000..40089ad9f8c --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java @@ -0,0 +1,65 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.api.IJobPersistence; +import ca.uhn.fhir.batch2.model.JobDefinition; +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.WorkChunk; +import ca.uhn.hapi.fhir.batch2.test.support.TestJobParameters; +import ca.uhn.test.concurrency.PointcutLatch; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +public interface ITestFixture { + + String createAndStoreJobInstance(JobDefinition theJobDefinition); + + String createAndDequeueWorkChunk(String theJobInstanceId); + + WorkChunk freshFetchWorkChunk(String theChunkId); + + String storeWorkChunk(String theJobDefinitionId, String theTargetStepId, String theInstanceId, int theSequence, String theSerializedData); + + void runInTransaction(Runnable theRunnable); + + void sleepUntilTimeChanges(); + + JobDefinition withJobDefinition(boolean theIsGatedJob); + + TransactionTemplate newTxTemplate(); + + JobInstance freshFetchJobInstance(String theInstanceId); + + void runMaintenancePass(); + + PlatformTransactionManager getTransactionManager(); + + IJobPersistence getSvc(); + + /** + * This assumes a creation of JOB_DEFINITION already + * @param theJobInstanceId + * @return + */ + String createChunk(String theJobInstanceId); + + /** + * Enable/disable the maintenance runner (So it 
doesn't run on a scheduler) + */ + void enableMaintenanceRunner(boolean theToEnable); + + /** + * Disables the workchunk message handler + * so that we do not actually send messages to the queue; + * useful if mocking state transitions and we don't want to test + * dequeuing. + * Returns a latch that will fire each time a message is sent. + */ + PointcutLatch disableWorkChunkMessageHandler(); + + /** + * + * @param theSendingLatch the latch sent back from the disableWorkChunkMessageHandler method above + * @param theNumberOfTimes the number of invocations to expect + */ + void verifyWorkChunkMessageHandlerCalled(PointcutLatch theSendingLatch, int theNumberOfTimes) throws InterruptedException; +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java new file mode 100644 index 00000000000..0e80c49288f --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java @@ -0,0 +1,23 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.StatusEnum; + +public interface IWorkChunkCommon extends WorkChunkTestConstants { + + /** + * Returns the concrete class that is implementing this stuff. 
+ * Used primarily for structure + */ + ITestFixture getTestManager(); + + default JobInstance createInstance() { + JobInstance instance = new JobInstance(); + instance.setJobDefinitionId(JOB_DEFINITION_ID); + instance.setStatus(StatusEnum.QUEUED); + instance.setJobDefinitionVersion(JOB_DEF_VER); + instance.setParameters(CHUNK_DATA); + instance.setReport("TEST"); + return instance; + } +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java new file mode 100644 index 00000000000..98b99350bb5 --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java @@ -0,0 +1,108 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.model.WorkChunk; +import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent; +import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent; +import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; +import org.junit.jupiter.api.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public interface IWorkChunkErrorActionsTests extends IWorkChunkCommon, WorkChunkTestConstants { + + + /** + * The consumer will retry after a retryable error is thrown + */ + @Test + default void errorRetry_errorToInProgress() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, FIRST_ERROR_MESSAGE)); + + // when consumer restarts chunk + WorkChunk chunk = getTestManager().getSvc().onWorkChunkDequeue(myChunkId).orElseThrow(IllegalArgumentException::new); + + // then + assertEquals(WorkChunkStatusEnum.IN_PROGRESS, 
chunk.getStatus()); + + // verify the db state, error message, and error count + var workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.IN_PROGRESS, workChunkEntity.getStatus()); + assertEquals(FIRST_ERROR_MESSAGE, workChunkEntity.getErrorMessage(), "Original error message kept"); + assertEquals(1, workChunkEntity.getErrorCount(), "error count kept"); + } + + @Test + default void errorRetry_repeatError_increasesErrorCount() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, FIRST_ERROR_MESSAGE)); + + // setup - the consumer is re-trying, and marks it IN_PROGRESS + getTestManager().getSvc().onWorkChunkDequeue(myChunkId); + + + // when another error happens + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); + + // verify the state, new message, and error count + var workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.ERRORED, workChunkEntity.getStatus()); + assertEquals(ERROR_MESSAGE_B, workChunkEntity.getErrorMessage(), "new error message"); + assertEquals(2, workChunkEntity.getErrorCount(), "error count inc"); + } + + @Test + default void errorThenRetryAndComplete_addsErrorCounts() { + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, FIRST_ERROR_MESSAGE)); + + // setup - the consumer is re-trying, and marks it IN_PROGRESS + getTestManager().getSvc().onWorkChunkDequeue(myChunkId); + + // then it completes ok. 
+ getTestManager().getSvc().onWorkChunkCompletion(new WorkChunkCompletionEvent(myChunkId, 3, 1)); + + // verify the state, new message, and error count + var workChunkEntity = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.COMPLETED, workChunkEntity.getStatus()); + assertEquals(FIRST_ERROR_MESSAGE, workChunkEntity.getErrorMessage(), "Error message kept."); + assertEquals(2, workChunkEntity.getErrorCount(), "error combined with earlier error"); + } + + @Test + default void errorRetry_maxErrors_movesToFailed() { + // we start with 1 error already + String jobId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createAndDequeueWorkChunk(jobId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, FIRST_ERROR_MESSAGE)); + + // 2nd try + getTestManager().getSvc().onWorkChunkDequeue(myChunkId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); + var chunk = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.ERRORED, chunk.getStatus()); + assertEquals(2, chunk.getErrorCount()); + + // 3rd try + getTestManager().getSvc().onWorkChunkDequeue(myChunkId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_B)); + chunk = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.ERRORED, chunk.getStatus()); + assertEquals(3, chunk.getErrorCount()); + + // 4th try + getTestManager().getSvc().onWorkChunkDequeue(myChunkId); + getTestManager().getSvc().onWorkChunkError(new WorkChunkErrorEvent(myChunkId, ERROR_MESSAGE_C)); + chunk = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.FAILED, chunk.getStatus()); + assertEquals(4, chunk.getErrorCount()); + assertThat("Error message contains last error", chunk.getErrorMessage(), containsString(ERROR_MESSAGE_C)); + assertThat("Error message contains error 
count and complaint", chunk.getErrorMessage(), containsString("many errors: 4")); + } + +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java new file mode 100644 index 00000000000..ac9ba26f2c5 --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java @@ -0,0 +1,32 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.model.WorkChunk; +import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; +import ca.uhn.test.concurrency.PointcutLatch; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public interface IWorkChunkStateTransitions extends IWorkChunkCommon, WorkChunkTestConstants { + + Logger ourLog = LoggerFactory.getLogger(IWorkChunkStateTransitions.class); + + @Test + default void chunkReceived_queuedToInProgress() throws InterruptedException { + String jobInstanceId = getTestManager().createAndStoreJobInstance(null); + String myChunkId = getTestManager().createChunk(jobInstanceId); + + getTestManager().runMaintenancePass(); + // the worker has received the chunk, and marks it started. 
+ WorkChunk chunk = getTestManager().getSvc().onWorkChunkDequeue(myChunkId).orElseThrow(IllegalArgumentException::new); + + assertEquals(WorkChunkStatusEnum.IN_PROGRESS, chunk.getStatus()); + assertEquals(CHUNK_DATA, chunk.getData()); + + // verify the db was updated too + WorkChunk fetchedWorkChunk = getTestManager().freshFetchWorkChunk(myChunkId); + assertEquals(WorkChunkStatusEnum.IN_PROGRESS, fetchedWorkChunk.getStatus()); + } +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java new file mode 100644 index 00000000000..c12de0420ce --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java @@ -0,0 +1,23 @@ +package ca.uhn.hapi.fhir.batch2.test; + +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.WorkChunk; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertNull; + +public interface IWorkChunkStorageTests extends IWorkChunkCommon, WorkChunkTestConstants { + + @Test + default void testStoreAndFetchWorkChunk_NoData() { + JobInstance instance = createInstance(); + String instanceId = getTestManager().getSvc().storeNewInstance(instance); + + String id = getTestManager().storeWorkChunk(JOB_DEFINITION_ID, TARGET_STEP_ID, instanceId, 0, null); + + getTestManager().runInTransaction(() -> { + WorkChunk chunk = getTestManager().freshFetchWorkChunk(id); + assertNull(chunk.getData()); + }); + } +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java new file mode 100644 index 00000000000..9f5df3120ff --- /dev/null +++ 
b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java @@ -0,0 +1,18 @@ +package ca.uhn.hapi.fhir.batch2.test; + +public interface WorkChunkTestConstants { + public static final String JOB_DEFINITION_ID = "definition-id"; + // we use a separate id for gated jobs because these job definitions might not + // be cleaned up after any given test run + String GATED_JOB_DEFINITION_ID = "gated_job_def_id"; + public static final String TARGET_STEP_ID = "step-id"; + public static final String DEF_CHUNK_ID = "definition-chunkId"; + public static final int JOB_DEF_VER = 1; + public static final int SEQUENCE_NUMBER = 1; + public static final String CHUNK_DATA = "{\"key\":\"value\"}"; + public static final String ERROR_MESSAGE_A = "This is an error message: A"; + public static final String ERROR_MESSAGE_B = "This is a different error message: B"; + public static final String ERROR_MESSAGE_C = "This is a different error message: C"; + + String FIRST_ERROR_MESSAGE = ERROR_MESSAGE_A; +} diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java new file mode 100644 index 00000000000..46331d1c219 --- /dev/null +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java @@ -0,0 +1,27 @@ +package ca.uhn.hapi.fhir.batch2.test.configs; + +import ca.uhn.fhir.batch2.channel.BatchJobSender; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; + +import static org.mockito.Mockito.spy; + +/** + * Provides spying overrides of beans we want to spy on. 
+ * + * We spy the BatchJobSender so we can test state transitions without + * actually sending messages onto the queue + */ +@Configuration +public class SpyOverrideConfig { + @Autowired + BatchJobSender myRealJobSender; + + @Primary + @Bean + public BatchJobSender batchJobSenderSpy() { + return spy(myRealJobSender); + } +} diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 2d86204e222..7890ad1bfa3 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java index 4b3033a3643..6d4a81ed63b 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java @@ -36,4 +36,11 @@ public interface IJobMaintenanceService { */ @VisibleForTesting void forceMaintenancePass(); + + /** + * This is only to be called in a testing environment + * to ensure state changes are controlled. 
+ */ + @VisibleForTesting + void enableMaintenancePass(boolean thetoEnable); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java index 099d0a86f43..cf2ec48aafe 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java @@ -97,6 +97,8 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc private Runnable myMaintenanceJobFinishedCallback = () -> {}; private final IReductionStepExecutorService myReductionStepExecutorService; + private boolean myEnabledBool = true; + /** * Constructor */ @@ -196,8 +198,17 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc doMaintenancePass(); } + @Override + public void enableMaintenancePass(boolean theToEnable) { + myEnabledBool = theToEnable; + } + @Override public void runMaintenancePass() { + if (!myEnabledBool) { + ourLog.error("Maintenance job is disabled! This will affect all batch2 jobs!"); + } + if (!myRunMaintenanceSemaphore.tryAcquire()) { ourLog.debug("Another maintenance pass is already in progress. 
Ignoring request."); return; diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index deec953b4d9..30a4b7d9731 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index aaaffd4dfe4..21684ec422d 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index 97697fbeceb..ba4f65c01e5 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 96682b6343b..7090925bf5f 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index 541a22ad700..146924739b1 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 0dab5394bcd..76dadd587ec 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml 
b/hapi-fhir-structures-dstu3/pom.xml index fc3ad9f5c7f..cfddebcbaae 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 04e09044793..3ec00da7fc7 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index b4150ec9c38..55a7aeba956 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index d852ad84473..73801c099d6 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 1c88c4f4869..67862c00146 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index ff040fa535c..eb4c71307da 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 0081f27ff59..02489770f91 
100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index be3911ce0ac..22142383e08 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index ab34e8d8bcc..27b3b119848 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 522ac2b450c..a33191a05ec 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index bb7ed613bb1..44b6a5f1805 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index fb7965abff1..d58235a44b1 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 11e616c37cd..3bde201665c 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index bd6a099c68c..b1432477175 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 4fb7cf0d88e..368e8aaf6a9 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 5f84df6f169..99a2b4d97a8 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index a3004385ff1..2ce4f6d190a 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. 
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 5f119fcd90b..890e8f28ed4 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 4250751cced..83b846622ad 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 0fd0f7c3774..a4d48372ff5 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.7-SNAPSHOT + 7.1.8-SNAPSHOT ../../pom.xml From eafa2aba29d92695795be203d8266ed40348c2da Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Mon, 8 Apr 2024 12:59:54 -0300 Subject: [PATCH 05/26] move mdm interceptor registration (#5830) * licenses * remove mdm placeholder interceptor * remove mdm placeholder interceptor * fix regression * Remove RTE Placeholder Interceptor. Add tests to assert proper boot sequence. 
* consolidate LogbackCaptureTestExtension (removed the cdr class) * consolidate LogbackCaptureTestExtension (removed the cdr class) --- .../IObservableSupplierSetObserver.java | 19 +++++++++ .../provider/ObservableSupplierSet.java | 19 +++++++++ .../batch2/test/IInProgressActionsTests.java | 19 +++++++++ .../test/IInstanceStateTransitions.java | 19 +++++++++ .../hapi/fhir/batch2/test/ITestFixture.java | 19 +++++++++ .../fhir/batch2/test/IWorkChunkCommon.java | 19 +++++++++ .../test/IWorkChunkErrorActionsTests.java | 19 +++++++++ .../test/IWorkChunkStateTransitions.java | 19 +++++++++ .../batch2/test/IWorkChunkStorageTests.java | 19 +++++++++ .../batch2/test/WorkChunkTestConstants.java | 19 +++++++++ .../test/configs/SpyOverrideConfig.java | 19 +++++++++ .../MdmSubmitterInterceptorLoader.java | 8 ++-- .../util/LogbackCaptureTestExtension.java | 18 +++++++++ .../StaticLogbackCaptureTestExtension.java | 39 +++++++++++++++++++ 14 files changed, 271 insertions(+), 3 deletions(-) create mode 100644 hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java index ee835d1dfd7..90bed1f2ada 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IObservableSupplierSetObserver.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.rest.server.provider; import jakarta.annotation.Nonnull; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java index d10e4b19438..f20eddb181b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ObservableSupplierSet.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.rest.server.provider; import jakarta.annotation.Nonnull; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java index eda69e384ac..63af19404b2 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInProgressActionsTests.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java index 57909eaecd1..377b603eabb 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IInstanceStateTransitions.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.api.IJobPersistence; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java index 40089ad9f8c..fdb75990f0d 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/ITestFixture.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.api.IJobPersistence; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java index 0e80c49288f..d9fabe6d0db 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkCommon.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.model.JobInstance; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java index 98b99350bb5..092f4092599 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkErrorActionsTests.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.model.WorkChunk; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java index ac9ba26f2c5..7f906173df9 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStateTransitions.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.model.WorkChunk; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java index c12de0420ce..0174dcaabdf 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/IWorkChunkStorageTests.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; import ca.uhn.fhir.batch2.model.JobInstance; diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java index 9f5df3120ff..8ab0157e52b 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/WorkChunkTestConstants.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test; public interface WorkChunkTestConstants { diff --git a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java index 46331d1c219..91735cd0414 100644 --- a/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java +++ b/hapi-fhir-storage-batch2-test-utilities/src/main/java/ca/uhn/hapi/fhir/batch2/test/configs/SpyOverrideConfig.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Batch2 specification tests + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.hapi.fhir.batch2.test.configs; import ca.uhn.fhir.batch2.channel.BatchJobSender; diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java index f65d49ab408..58def7ad3cb 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java @@ -41,8 +41,8 @@ public class MdmSubmitterInterceptorLoader { @Autowired JpaStorageSettings myStorageSettings; - @Autowired - private IMdmStorageInterceptor myIMdmStorageInterceptor; + @Autowired(required = false) + private IMdmStorageInterceptor myMdmStorageInterceptor; @Autowired private MdmSearchExpandingInterceptor myMdmSearchExpandingInterceptorInterceptor; @@ -60,7 +60,9 @@ public class MdmSubmitterInterceptorLoader { throw new ConfigurationException( Msg.code(2421) + "MDM requires Message Subscriptions to be enabled in the Storage Settings"); } - myInterceptorService.registerInterceptor(myIMdmStorageInterceptor); + if (myMdmStorageInterceptor != null) { + myInterceptorService.registerInterceptor(myMdmStorageInterceptor); + } myInterceptorService.registerInterceptor(myMdmSearchExpandingInterceptorInterceptor); ourLog.info("MDM interceptors registered"); } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackCaptureTestExtension.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackCaptureTestExtension.java index f5f61161ba4..eb58685aad5 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackCaptureTestExtension.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackCaptureTestExtension.java @@ -86,6 +86,14 @@ public class LogbackCaptureTestExtension implements BeforeEachCallback, AfterEac 
this(theClass.getName()); } + public LogbackCaptureTestExtension(Class theClass, Level theLevel) { + this(theClass.getName(), theLevel); + } + + public LogbackCaptureTestExtension(org.slf4j.Logger theLogger) { + this((Logger) theLogger); + } + /** * Returns a copy to avoid concurrent modification errors. * @return A copy of the log events so far. @@ -154,6 +162,16 @@ public class LogbackCaptureTestExtension implements BeforeEachCallback, AfterEac .collect(Collectors.toList()); } + /** + * Extract the log messages from the logging events. + * @return a copy of the List of log messages + * + */ + @Nonnull + public List getLogMessages() { + return getLogEvents().stream().map(ILoggingEvent::getMessage).collect(Collectors.toList()); + } + // Hamcrest matcher support public static Matcher eventWithLevelAndMessageContains(@Nonnull Level theLevel, @Nonnull String thePartialMessage) { return new LogbackEventMatcher(theLevel, thePartialMessage); diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java new file mode 100644 index 00000000000..9c8dda60674 --- /dev/null +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java @@ -0,0 +1,39 @@ +package ca.uhn.test.util; + +import ch.qos.logback.classic.spi.ILoggingEvent; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +import java.util.List; + +/** + * This is a static wrapper around LogbackCaptureTestExtension for use in IT tests when you need to assert on App + * startup log entries + */ + +public class StaticLogbackCaptureTestExtension implements BeforeAllCallback, AfterAllCallback { + private final LogbackCaptureTestExtension myLogbackCaptureTestExtension; + + public 
StaticLogbackCaptureTestExtension(LogbackCaptureTestExtension theLogbackCaptureTestExtension) { + myLogbackCaptureTestExtension = theLogbackCaptureTestExtension; + } + + public StaticLogbackCaptureTestExtension() { + myLogbackCaptureTestExtension = new LogbackCaptureTestExtension(); + } + + @Override + public void beforeAll(ExtensionContext theExtensionContext) throws Exception { + myLogbackCaptureTestExtension.beforeEach(theExtensionContext); + } + + @Override + public void afterAll(ExtensionContext theExtensionContext) throws Exception { + myLogbackCaptureTestExtension.afterEach(theExtensionContext); + } + + public List filterLoggingEventsWithMessageEqualTo(String theMessageText) { + return myLogbackCaptureTestExtension.filterLoggingEventsWithMessageEqualTo(theMessageText); + } +} From 107de2cfba04691ec085453062ee98f7200d7a4e Mon Sep 17 00:00:00 2001 From: TipzCM Date: Mon, 8 Apr 2024 13:51:41 -0400 Subject: [PATCH 06/26] reduction step failing fails jobs (#5831) * adding a catch for failing reduction step * spotless * added changelog --------- Co-authored-by: leif stawnyczy --- ...-jobs-failing-in-reduction-fails-jobs.yaml | 8 +++ .../fhir/jpa/batch2/Batch2CoordinatorIT.java | 55 +++++++++++++++++++ .../ReductionStepExecutorServiceImpl.java | 39 ++++++++++--- 3 files changed, 94 insertions(+), 8 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5828-reduction-jobs-failing-in-reduction-fails-jobs.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5828-reduction-jobs-failing-in-reduction-fails-jobs.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5828-reduction-jobs-failing-in-reduction-fails-jobs.yaml new file mode 100644 index 00000000000..978d0e3e233 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5828-reduction-jobs-failing-in-reduction-fails-jobs.yaml @@ -0,0 +1,8 @@ +--- +type: fix +issue: 5828 +title: "When batch 
2 jobs with Reduction steps fail in the final part + of the reduction step, this would often leave the job + stuck in the FINALIZE state. + This has been fixed; the job will now FAIL. +" diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java index 130cf6951a3..2e304e08373 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java @@ -234,6 +234,61 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test { assertEquals(1.0, jobInstance.getProgress()); } + @Test + public void reductionStepFailing_willFailJob() throws InterruptedException { + // setup + String jobId = new Exception().getStackTrace()[0].getMethodName(); + int totalChunks = 3; + AtomicInteger chunkCounter = new AtomicInteger(); + String error = "this is an error"; + + buildAndDefine3StepReductionJob(jobId, new IReductionStepHandler() { + + @Override + public void firstStep(StepExecutionDetails theStep, IJobDataSink theDataSink) { + for (int i = 0; i < totalChunks; i++) { + theDataSink.accept(new FirstStepOutput()); + } + } + + @Override + public void secondStep(StepExecutionDetails theStep, IJobDataSink theDataSink) { + SecondStepOutput output = new SecondStepOutput(); + theDataSink.accept(output); + } + + @Override + public void reductionStepConsume(ChunkExecutionDetails theChunkDetails, IJobDataSink theDataSink) { + chunkCounter.getAndIncrement(); + } + + @Override + public void reductionStepRun(StepExecutionDetails theStepExecutionDetails, IJobDataSink theDataSink) { + // always throw + throw new RuntimeException(error); + } + }); + + // test + JobInstanceStartRequest request = buildRequest(jobId); + myFirstStepLatch.setExpectedCount(1); + Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(new 
SystemRequestDetails(), request); + String instanceId = startResponse.getInstanceId(); + assertNotNull(instanceId); + + // waiting for job to end (any status - but we'll verify failed later) + myBatch2JobHelper.awaitJobHasStatus(instanceId, StatusEnum.getEndedStatuses().toArray(new StatusEnum[0])); + + // verify + Optional instanceOp = myJobPersistence.fetchInstance(instanceId); + assertTrue(instanceOp.isPresent()); + JobInstance jobInstance = instanceOp.get(); + + assertEquals(totalChunks, chunkCounter.get()); + + assertEquals(StatusEnum.FAILED, jobInstance.getStatus()); + } + @Test public void testJobWithReductionStepFiresCompletionHandler() throws InterruptedException { // setup diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java index 48c20840784..febd36805bb 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java @@ -30,6 +30,7 @@ import ca.uhn.fhir.batch2.model.ChunkOutcome; import ca.uhn.fhir.batch2.model.JobDefinitionStep; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobWorkCursor; +import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; @@ -137,7 +138,6 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS public void reducerPass() { if (myCurrentlyExecuting.tryAcquire()) { try { - String[] instanceIds = myInstanceIdToJobWorkCursor.keySet().toArray(new String[0]); if (instanceIds.length > 0) { String instanceId = instanceIds[0]; @@ -214,6 +214,36 @@ public class ReductionStepExecutorServiceImpl implements 
IReductionStepExecutorS boolean defaultSuccessValue = true; ReductionStepChunkProcessingResponse response = new ReductionStepChunkProcessingResponse(defaultSuccessValue); + try { + processChunksAndCompleteJob(theJobWorkCursor, step, instance, parameters, reductionStepWorker, response); + } catch (Exception ex) { + ourLog.error("Job completion failed for Job {}", instance.getInstanceId()); + + executeInTransactionWithSynchronization(() -> { + myJobPersistence.updateInstance(instance.getInstanceId(), theInstance -> { + theInstance.setStatus(StatusEnum.FAILED); + return true; + }); + return null; + }); + response.setSuccessful(false); + } + + // if no successful chunks, return false + if (!response.hasSuccessfulChunksIds()) { + response.setSuccessful(false); + } + + return response; + } + + private void processChunksAndCompleteJob( + JobWorkCursor theJobWorkCursor, + JobDefinitionStep step, + JobInstance instance, + PT parameters, + IReductionStepWorker reductionStepWorker, + ReductionStepChunkProcessingResponse response) { try { executeInTransactionWithSynchronization(() -> { try (Stream chunkIterator = @@ -277,13 +307,6 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS return null; }); } - - // if no successful chunks, return false - if (!response.hasSuccessfulChunksIds()) { - response.setSuccessful(false); - } - - return response; } private T executeInTransactionWithSynchronization(Callable runnable) { From 4d8427ad60049a030fffa9d6ea21bc05ed0269b0 Mon Sep 17 00:00:00 2001 From: Luke deGruchy Date: Tue, 9 Apr 2024 10:27:15 -0400 Subject: [PATCH 07/26] Ensure CanonicalizedSubscription reflects StorageSettings.isCrossPartitionSubscriptionEnabled if it's true. Make that setting is true by default (#5810) * First commit with TODOs and logging. * Try to add cross partition config at startup to subscription module. 
* Barely working solution with JpaStorageSettings injected into the Subscription module with the correct config for cross partition enabled. * Implement agreed upon solution where StorageSettings used in the subscription module uses the JpaStorageSettings cross partition enabled setting. Fix all compile errors. TODOs for tests to add and known test failures. * Fix test errors caused by bad log code. Ensure all modules use StorageSettings for canonicalizer. * Cleanup. * Reintroduce old SubscriptionCanonicalizer constructor, but add a StorageSettings and deprecate it. Cleanup logs and TODOs. * Deprecate FHIR_PATCH. More cleanup. * Deprecate FHIR_PATCH correctly. * Small fix. * Set myCrossPartitionSubscriptionEnabled to true by default. * Fix test failures. * Fix another test. * Code review feedback. * Resolve static analysis warnings. --- .../jpa/model/entity/StorageSettings.java | 2 +- .../model/config/SubscriptionModelConfig.java | 6 +- ...SubscriptionRegisteringSubscriberTest.java | 3 +- .../registry/SubscriptionRegistryTest.java | 3 +- .../module/CanonicalSubscriptionTest.java | 39 ++++--- .../SubscriptionMatchingSubscriberTest.java | 110 +++++++++++++----- ...SubscriptionValidatingInterceptorTest.java | 3 +- ...rtitionedSubscriptionTriggeringR4Test.java | 53 ++++++--- ...SubscriptionValidatingInterceptorTest.java | 3 +- .../interceptor/auth/RuleImplPatch.java | 5 + .../jpa/api/config/JpaStorageSettings.java | 2 +- .../registry/SubscriptionCanonicalizer.java | 32 ++++- .../SubscriptionCanonicalizerTest.java | 63 +++++++++- 13 files changed, 251 insertions(+), 73 deletions(-) diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java index 9ed51762e50..2399cf7917c 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java +++ 
b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java @@ -97,7 +97,7 @@ public class StorageSettings { private boolean myDefaultSearchParamsCanBeOverridden = true; private Set mySupportedSubscriptionTypes = new HashSet<>(); private boolean myAutoCreatePlaceholderReferenceTargets; - private boolean myCrossPartitionSubscriptionEnabled = false; + private boolean myCrossPartitionSubscriptionEnabled = true; private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE; private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE; private boolean myEnableInMemorySubscriptionMatching = true; diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/model/config/SubscriptionModelConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/model/config/SubscriptionModelConfig.java index ba7ad982ad1..e14d818f5c1 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/model/config/SubscriptionModelConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/model/config/SubscriptionModelConfig.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.subscription.model.config; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer; import org.springframework.context.annotation.Bean; @@ -29,8 +30,9 @@ import org.springframework.context.annotation.Configuration; public class SubscriptionModelConfig { @Bean - public SubscriptionCanonicalizer subscriptionCanonicalizer(FhirContext theFhirContext) { - return new SubscriptionCanonicalizer(theFhirContext); + public SubscriptionCanonicalizer subscriptionCanonicalizer( + FhirContext theFhirContext, StorageSettings theStorageSettings) { + return new 
SubscriptionCanonicalizer(theFhirContext, theStorageSettings); } @Bean diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriberTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriberTest.java index 1bb464e1435..6efdd7b7c61 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriberTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriberTest.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage; @@ -50,7 +51,7 @@ public class SubscriptionRegisteringSubscriberTest { @Mock private SubscriptionRegistry mySubscriptionRegistry; @Spy - private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext); + private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext, new StorageSettings()); @Mock private DaoRegistry myDaoRegistry; @Mock diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistryTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistryTest.java index ce7b93a8fa5..7c32141e549 100644 --- 
a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistryTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistryTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.match.registry; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.channel.subscription.ISubscriptionDeliveryChannelNamer; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelRegistry; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; @@ -33,7 +34,7 @@ public class SubscriptionRegistryTest { static FhirContext ourFhirContext = FhirContext.forR4Cached(); @Spy - SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext); + SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext, new StorageSettings()); @Spy ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer = new TestChannelNamer(); diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/CanonicalSubscriptionTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/CanonicalSubscriptionTest.java index 2a00245eb31..e2410314f9f 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/CanonicalSubscriptionTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/CanonicalSubscriptionTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.subscription.module; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer; 
import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryJsonMessage; @@ -8,12 +9,15 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage; import ca.uhn.fhir.util.HapiExtensions; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import jakarta.annotation.Nonnull; import org.assertj.core.util.Lists; import org.hamcrest.Matchers; import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.Subscription; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +70,7 @@ public class CanonicalSubscriptionTest { @Test public void testCanonicalSubscriptionRetainsMetaTags() throws IOException { - SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4()); + SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings()); CanonicalSubscription sub1 = canonicalizer.canonicalize(makeMdmSubscription()); assertTrue(sub1.getTags().keySet().contains(TAG_SYSTEM)); assertEquals(sub1.getTags().get(TAG_SYSTEM), TAG_VALUE); @@ -74,7 +78,7 @@ public class CanonicalSubscriptionTest { @Test public void emailDetailsEquals() { - SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4()); + SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings()); CanonicalSubscription sub1 = canonicalizer.canonicalize(makeEmailSubscription()); CanonicalSubscription sub2 = canonicalizer.canonicalize(makeEmailSubscription()); assertTrue(sub1.equals(sub2)); @@ -82,7 +86,7 @@ public class CanonicalSubscriptionTest { @Test public void testSerializeMultiPartitionSubscription(){ - SubscriptionCanonicalizer canonicalizer = new 
SubscriptionCanonicalizer(FhirContext.forR4()); + SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings()); Subscription subscription = makeEmailSubscription(); subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(true)); CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription); @@ -90,28 +94,30 @@ public class CanonicalSubscriptionTest { assertEquals(canonicalSubscription.getCrossPartitionEnabled(), true); } - @Test - public void testSerializeIncorrectMultiPartitionSubscription(){ - SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4()); + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSerializeIncorrectMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){ + final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled); + SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings); Subscription subscription = makeEmailSubscription(); subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new StringType().setValue("false")); CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription); - System.out.print(canonicalSubscription); - - assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false); + assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled); } - @Test - public void testSerializeNonMultiPartitionSubscription(){ - SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4()); + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSerializeNonMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){ + final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled); + 
SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings); Subscription subscription = makeEmailSubscription(); subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(false)); CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription); System.out.print(canonicalSubscription); - assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false); + assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled); } @Test @@ -154,4 +160,11 @@ public class CanonicalSubscriptionTest { ResourceDeliveryMessage payload = resourceDeliveryMessage.getPayload(); return payload.getSubscription(); } + + @Nonnull + private static StorageSettings buildStorageSettings(boolean theIsCrossPartitionEnabled) { + final StorageSettings storageSettings = new StorageSettings(); + storageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); + return storageSettings; + } } diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java index 7730df2e400..1c0dcd424e5 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java @@ -27,6 +27,8 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Answers; import org.mockito.InjectMocks; import 
org.mockito.Mock; @@ -38,6 +40,7 @@ import java.util.Optional; import static ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser.TypeEnum.STARTYPE_EXPRESSION; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atLeastOnce; @@ -227,8 +230,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri ourObservationListener.awaitExpected(); } - @Test - public void testSubscriptionAndResourceOnDiffPartitionNotMatch() throws InterruptedException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSubscriptionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); myPartitionSettings.setPartitioningEnabled(true); String payload = "application/fhir+json"; @@ -240,13 +245,18 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri mockSubscriptionRead(requestPartitionId, subscription); sendSubscription(subscription, requestPartitionId, true); - mySubscriptionResourceNotMatched.setExpectedCount(1); - sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0)); - mySubscriptionResourceNotMatched.awaitExpected(); + final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0)); + if (theIsCrossPartitionEnabled) { + runWithinLatchLogicExpectSuccess(throwsInterrupted); + } else { + runWithLatchLogicExpectFailure(throwsInterrupted); + } } - @Test - public void testSubscriptionAndResourceOnDiffPartitionNotMatchPart2() throws InterruptedException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void 
testSubscriptionAndResourceOnDiffPartitionNotMatchPart2(boolean theIsCrossPartitionEnabled) throws InterruptedException { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); myPartitionSettings.setPartitioningEnabled(true); String payload = "application/fhir+json"; @@ -258,13 +268,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri mockSubscriptionRead(requestPartitionId, subscription); sendSubscription(subscription, requestPartitionId, true); - mySubscriptionResourceNotMatched.setExpectedCount(1); - sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1)); - mySubscriptionResourceNotMatched.awaitExpected(); + final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1)); + + if (theIsCrossPartitionEnabled) { + runWithinLatchLogicExpectSuccess(throwsInterrupted); + } else { + runWithLatchLogicExpectFailure(throwsInterrupted); + } } - @Test - public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch() throws InterruptedException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); myPartitionSettings.setPartitioningEnabled(true); String payload = "application/fhir+json"; @@ -276,13 +292,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri mockSubscriptionRead(requestPartitionId, subscription); sendSubscription(subscription, requestPartitionId, true); - mySubscriptionResourceNotMatched.setExpectedCount(1); - sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1)); - mySubscriptionResourceNotMatched.awaitExpected(); + final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", 
RequestPartitionId.fromPartitionId(1)); + + if (theIsCrossPartitionEnabled) { + runWithinLatchLogicExpectSuccess(throwsInterrupted); + } else { + runWithLatchLogicExpectFailure(throwsInterrupted); + } } - @Test - public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch() throws InterruptedException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); myPartitionSettings.setPartitioningEnabled(true); String payload = "application/fhir+json"; @@ -294,9 +316,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri mockSubscriptionRead(requestPartitionId, subscription); sendSubscription(subscription, requestPartitionId, true); - mySubscriptionResourceNotMatched.setExpectedCount(1); - sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition()); - mySubscriptionResourceNotMatched.awaitExpected(); + final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition()); + + if (theIsCrossPartitionEnabled) { + runWithinLatchLogicExpectSuccess(throwsInterrupted); + } else { + runWithLatchLogicExpectFailure(throwsInterrupted); + } } @Test @@ -320,8 +346,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri ourObservationListener.awaitExpected(); } - @Test - public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions() throws InterruptedException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions(boolean theIsCrossPartitionEnabled) throws InterruptedException { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); 
myPartitionSettings.setPartitioningEnabled(true); String payload = "application/fhir+json"; @@ -333,10 +361,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri mockSubscriptionRead(requestPartitionId, subscription); sendSubscription(subscription, requestPartitionId, true); - mySubscriptionResourceNotMatched.setExpectedCount(1); - List partitionId = Collections.synchronizedList(Lists.newArrayList(0, 2)); - sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(partitionId)); - mySubscriptionResourceNotMatched.awaitExpected(); + final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(Collections.synchronizedList(Lists.newArrayList(0, 2)))); + + if (theIsCrossPartitionEnabled) { + runWithinLatchLogicExpectSuccess(throwsInterrupted); + } else { + runWithLatchLogicExpectFailure(throwsInterrupted); + } } @Test @@ -519,4 +550,31 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri verify(message, atLeastOnce()).getPayloadId(null); } } + + private interface ThrowsInterrupted { + void runOrThrow() throws InterruptedException; + } + + private void runWithLatchLogicExpectFailure(ThrowsInterrupted theRunnable) { + try { + mySubscriptionResourceNotMatched.setExpectedCount(1); + theRunnable.runOrThrow(); + mySubscriptionResourceNotMatched.awaitExpected(); + } catch (InterruptedException exception) { + Thread.currentThread().interrupt(); + } + } + + private void runWithinLatchLogicExpectSuccess(ThrowsInterrupted theRunnable) { + try { + ourObservationListener.setExpectedCount(1); + mySubscriptionResourceMatched.setExpectedCount(1); + theRunnable.runOrThrow(); + mySubscriptionResourceMatched.awaitExpected(); + ourObservationListener.awaitExpected(); + } catch (InterruptedException exception) { + fail(); + Thread.currentThread().interrupt(); + } + } } diff --git 
a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptorTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptorTest.java index a89f16de667..13bfbd66e92 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptorTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptorTest.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer; @@ -226,7 +227,7 @@ public class SubscriptionValidatingInterceptorTest { @Bean SubscriptionCanonicalizer subscriptionCanonicalizer(FhirContext theFhirContext) { - return new SubscriptionCanonicalizer(theFhirContext); + return new SubscriptionCanonicalizer(theFhirContext, new StorageSettings()); } @Bean diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/PartitionedSubscriptionTriggeringR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/PartitionedSubscriptionTriggeringR4Test.java index 1db467bd049..22c7d97817b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/PartitionedSubscriptionTriggeringR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/PartitionedSubscriptionTriggeringR4Test.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.interceptor.api.Interceptor; import 
ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.dao.r4.BasePartitioningR4Test; @@ -24,15 +25,13 @@ import jakarta.servlet.ServletException; import org.awaitility.core.ConditionTimeoutException; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r4.model.BooleanType; -import org.hl7.fhir.r4.model.Observation; -import org.hl7.fhir.r4.model.Parameters; -import org.hl7.fhir.r4.model.Patient; -import org.hl7.fhir.r4.model.Subscription; +import org.hl7.fhir.r4.model.*; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -41,6 +40,7 @@ import java.time.LocalDate; import java.time.Month; import java.util.ArrayList; import java.util.List; +import java.util.stream.Stream; import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.containsString; @@ -146,8 +146,10 @@ public class PartitionedSubscriptionTriggeringR4Test extends BaseSubscriptionsR4 Assertions.assertEquals(Constants.CT_FHIR_JSON_NEW, BaseSubscriptionsR4Test.ourRestfulServer.getRequestContentTypes().get(0)); } - @Test - public void testCreateSubscriptionInPartitionAndResourceInDifferentPartition() throws Exception { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testCreateSubscriptionInPartitionAndResourceInDifferentPartition(boolean theIsCrossPartitionEnabled) throws Exception { + 
myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); String payload = "application/fhir+json"; String code = "1000000050"; @@ -169,31 +171,40 @@ public class PartitionedSubscriptionTriggeringR4Test extends BaseSubscriptionsR4 Assertions.assertEquals(0, BaseSubscriptionsR4Test.ourPatientProvider.getCountCreate()); try { - // Should have 0 matching subscription, if we get 1 update count then the test fails BaseSubscriptionsR4Test.ourPatientProvider.waitForUpdateCount(1); - fail(); + if (!theIsCrossPartitionEnabled) { + fail("Expecting a timeout and 0 matching subscriptions and thus a timeout if cross partition is DISabled"); + } + Assertions.assertEquals(1, BaseSubscriptionsR4Test.ourRestfulServer.getRequestContentTypes().size()); } catch (ConditionTimeoutException e) { - Assertions.assertEquals(0, BaseSubscriptionsR4Test.ourRestfulServer.getRequestContentTypes().size()); + if (theIsCrossPartitionEnabled) { + fail("Expecting no timeout and 1 matching subscriptions and thus a timeout if cross partition is enabled"); + } else { + // Should have 0 matching subscription, if we get 1 update count then the test fails + Assertions.assertEquals(0, BaseSubscriptionsR4Test.ourRestfulServer.getRequestContentTypes().size()); + } } } - - @Test - public void testManualTriggeredSubscriptionDoesNotCheckOutsideOfPartition() throws Exception { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testManualTriggeredSubscriptionDoesNotCheckOutsideOfPartition(boolean theIsCrossPartitionEnabled) throws Exception { + myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled); String payload = "application/fhir+json"; String code = "1000000050"; String criteria1 = "Observation?code=SNOMED-CT|" + code + "&_format=xml"; //Given: We store a resource in partition 2 myPartitionInterceptor.setRequestPartitionId(REQ_PART_2); - IIdType observationIdPartitionTwo = 
myDaoRegistry.getResourceDao("Observation").create(createBaseObservation(code, "SNOMED-CT"), mySrd).getId(); + final IFhirResourceDao observation = myDaoRegistry.getResourceDao("Observation"); + IIdType observationIdPartitionTwo = observation.create(createBaseObservation(code, "SNOMED-CT"), mySrd).getId(); //Given: We store a similar resource in partition 1 myPartitionInterceptor.setRequestPartitionId(REQ_PART_1); - IIdType observationIdPartitionOne = myDaoRegistry.getResourceDao("Observation").create(createBaseObservation(code, "SNOMED-CT"), mySrd).getId(); + IIdType observationIdPartitionOne = observation.create(createBaseObservation(code, "SNOMED-CT"), mySrd).getId(); //Given: We create a subscrioption on Partition 1 - IIdType subscriptionId= myDaoRegistry.getResourceDao("Subscription").create(newSubscription(criteria1, payload), mySrd).getId(); + IIdType subscriptionId = myDaoRegistry.getResourceDao("Subscription").create(newSubscription(criteria1, payload), mySrd).getId(); waitForActivatedSubscriptionCount(1); ArrayList> searchUrlList = new ArrayList<>(); @@ -204,8 +215,14 @@ public class PartitionedSubscriptionTriggeringR4Test extends BaseSubscriptionsR4 waitForQueueToDrain(); List resourceUpdates = BaseSubscriptionsR4Test.ourObservationProvider.getResourceUpdates(); - assertThat(resourceUpdates.size(), is(equalTo(1))); - assertThat(resourceUpdates.get(0).getId(), is(equalTo(observationIdPartitionOne.toString()))); + if (theIsCrossPartitionEnabled) { + assertThat(resourceUpdates.size(), is(equalTo(2))); + assertThat(resourceUpdates.stream().map(Resource::getId).sorted().toList(), + is(equalTo(Stream.of(observationIdPartitionOne, observationIdPartitionTwo).map(Object::toString).sorted().toList()))); + } else { + assertThat(resourceUpdates.size(), is(equalTo(1))); + assertThat(resourceUpdates.get(0).getId(), is(equalTo(observationIdPartitionOne.toString()))); + } String responseValue = resultParameters.getParameter().get(0).getValue().primitiveValue(); 
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID")); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionValidatingInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionValidatingInterceptorTest.java index 81bd82a4959..68cdbf43109 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionValidatingInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionValidatingInterceptorTest.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionMatchingStrategy; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; @@ -63,7 +64,7 @@ public class SubscriptionValidatingInterceptorTest { @BeforeEach public void before() { mySvc = new SubscriptionValidatingInterceptor(); - mySubscriptionCanonicalizer = spy(new SubscriptionCanonicalizer(myCtx)); + mySubscriptionCanonicalizer = spy(new SubscriptionCanonicalizer(myCtx, new StorageSettings())); mySvc.setSubscriptionCanonicalizerForUnitTest(mySubscriptionCanonicalizer); mySvc.setDaoRegistryForUnitTest(myDaoRegistry); mySvc.setSubscriptionStrategyEvaluatorForUnitTest(mySubscriptionStrategyEvaluator); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java index f8e22373650..297fdac52f3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java @@ -27,6 +27,11 @@ import org.hl7.fhir.instance.model.api.IIdType; import java.util.Set; +/** + * @deprecated Users should be instead be granted more granular write permissions that cover PATCH operations. + * @since 7.2.0 + */ +@Deprecated class RuleImplPatch extends BaseRule { private boolean myAllRequests; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 294b72da983..aafb69b8e2d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -48,6 +48,7 @@ import java.util.TreeSet; @SuppressWarnings("JavadocLinkAsPlainText") public class JpaStorageSettings extends StorageSettings { + private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageSettings.class); /** * Default value for {@link #getBulkExportFileMaximumSize()}: 100 MB @@ -105,7 +106,6 @@ public class JpaStorageSettings extends StorageSettings { */ private static final Integer DEFAULT_MAXIMUM_SEARCH_RESULT_COUNT_IN_TRANSACTION = null; - private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageSettings.class); private static final int DEFAULT_REINDEX_BATCH_SIZE = 800; private static final int DEFAULT_MAXIMUM_DELETE_CONFLICT_COUNT = 60; /** diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java index 5e251b5d67b..07e6664a348 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java @@ 
-23,6 +23,7 @@ import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionMatchingStrategy; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscriptionChannelType; @@ -68,10 +69,23 @@ public class SubscriptionCanonicalizer { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionCanonicalizer.class); final FhirContext myFhirContext; + private final StorageSettings myStorageSettings; @Autowired + public SubscriptionCanonicalizer(FhirContext theFhirContext, StorageSettings theStorageSettings) { + myFhirContext = theFhirContext; + myStorageSettings = theStorageSettings; + } + + // TODO: LD: remove this constructor once all callers call the 2 arg constructor above + + /** + * @deprecated All callers should invoke {@link SubscriptionCanonicalizer()} instead. 
+ */ + @Deprecated public SubscriptionCanonicalizer(FhirContext theFhirContext) { myFhirContext = theFhirContext; + myStorageSettings = new StorageSettings(); } public CanonicalSubscription canonicalize(IBaseResource theSubscription) { @@ -109,7 +123,7 @@ public class SubscriptionCanonicalizer { retVal.setIdElement(subscription.getIdElement()); retVal.setPayloadString(channel.getPayload()); retVal.setTags(extractTags(subscription)); - retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); + handleCrossPartition(theSubscription, retVal); retVal.setSendDeleteMessages(extractDeleteExtensionDstu2(subscription)); } catch (FHIRException theE) { throw new InternalErrorException(Msg.code(557) + theE); @@ -161,7 +175,7 @@ public class SubscriptionCanonicalizer { retVal.setPayloadSearchCriteria( getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); - retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); + handleCrossPartition(theSubscription, retVal); if (retVal.getChannelType() == CanonicalSubscriptionChannelType.EMAIL) { String from; @@ -292,7 +306,7 @@ public class SubscriptionCanonicalizer { getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); setPartitionIdOnReturnValue(theSubscription, retVal); - retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); + handleCrossPartition(theSubscription, retVal); List profiles = subscription.getMeta().getProfile(); @@ -497,6 +511,8 @@ public class SubscriptionCanonicalizer { retVal.setSendDeleteMessages(extension.getValueBooleanType().booleanValue()); } + handleCrossPartition(theSubscription, retVal); + return retVal; } @@ -561,6 +577,8 @@ public class SubscriptionCanonicalizer { setR5FlagsBasedOnChannelType(subscription, retVal); + handleCrossPartition(theSubscription, retVal); + 
return retVal; } @@ -762,4 +780,12 @@ public class SubscriptionCanonicalizer { } return status.getValueAsString(); } + + private void handleCrossPartition(IBaseResource theSubscription, CanonicalSubscription retVal) { + if (myStorageSettings.isCrossPartitionSubscriptionEnabled()) { + retVal.setCrossPartitionEnabled(true); + } else { + retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); + } + } } diff --git a/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizerTest.java b/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizerTest.java index 1c34a8e7e73..e379a5d303b 100644 --- a/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizerTest.java +++ b/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizerTest.java @@ -1,23 +1,35 @@ package ca.uhn.fhir.jpa.subscription.match.registry; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscriptionChannelType; import ca.uhn.fhir.jpa.subscription.model.CanonicalTopicSubscriptionFilter; import ca.uhn.fhir.model.api.ExtensionDt; +import ca.uhn.fhir.model.api.IFhirVersion; +import ca.uhn.fhir.model.dstu2.FhirDstu2; import ca.uhn.fhir.model.primitive.BooleanDt; import ca.uhn.fhir.subscription.SubscriptionConstants; import ca.uhn.fhir.subscription.SubscriptionTestDataHelper; +import org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.hapi.ctx.FhirR4; import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.Subscription; +import org.hl7.fhir.r4b.hapi.ctx.FhirR4B; +import org.hl7.fhir.r5.hapi.ctx.FhirR5; import 
org.hl7.fhir.r5.model.Coding; import org.hl7.fhir.r5.model.Enumerations; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; +import java.util.stream.Stream; + import static ca.uhn.fhir.rest.api.Constants.CT_FHIR_JSON_NEW; import static ca.uhn.fhir.util.HapiExtensions.EX_SEND_DELETE_MESSAGES; import static org.hamcrest.MatcherAssert.assertThat; @@ -26,12 +38,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.params.provider.Arguments.arguments; class SubscriptionCanonicalizerTest { FhirContext r4Context = FhirContext.forR4(); - private final SubscriptionCanonicalizer testedSC = new SubscriptionCanonicalizer(r4Context); + private final SubscriptionCanonicalizer testedSC = new SubscriptionCanonicalizer(r4Context, new StorageSettings()); @Test void testCanonicalizeR4SendDeleteMessagesSetsExtensionValueNotPresent() { @@ -72,7 +85,7 @@ class SubscriptionCanonicalizerTest { @Test public void testCanonicalizeDstu2SendDeleteMessages() { //setup - SubscriptionCanonicalizer dstu2Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forDstu2Cached()); + SubscriptionCanonicalizer dstu2Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forDstu2Cached(), new StorageSettings()); ca.uhn.fhir.model.dstu2.resource.Subscription dstu2Sub = new ca.uhn.fhir.model.dstu2.resource.Subscription(); ExtensionDt extensionDt = new ExtensionDt(); extensionDt.setUrl(EX_SEND_DELETE_MESSAGES); @@ -108,7 +121,7 @@ class SubscriptionCanonicalizerTest { @ValueSource(strings = {"full-resource", "id-only", "empty"}) public void 
testR5Canonicalize_returnsCorrectCanonicalSubscription(String thePayloadContent) { // setup - SubscriptionCanonicalizer r5Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR5Cached()); + SubscriptionCanonicalizer r5Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR5Cached(), new StorageSettings()); org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent payloadContent = org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent.fromCode(thePayloadContent); org.hl7.fhir.r5.model.Subscription subscription = buildR5Subscription(payloadContent); @@ -148,7 +161,7 @@ class SubscriptionCanonicalizerTest { // Example drawn from http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/Subscription-subscription-zulip.json.html // setup - SubscriptionCanonicalizer r4bCanonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4BCached()); + SubscriptionCanonicalizer r4bCanonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4BCached(), new StorageSettings()); org.hl7.fhir.r4b.model.Subscription subscription = buildR4BSubscription(thePayloadContent); // execute @@ -160,6 +173,46 @@ class SubscriptionCanonicalizerTest { verifyChannelParameters(canonical, thePayloadContent); } + + private static Stream crossPartitionParams() { + return Stream.of( + arguments(true, FhirContext.forDstu2Cached()), + arguments(false, FhirContext.forDstu2Cached()), + arguments(true, FhirContext.forDstu3Cached()), + arguments(false, FhirContext.forDstu3Cached()), + arguments(true, FhirContext.forR4Cached()), + arguments(false, FhirContext.forR4Cached()), + arguments(true, FhirContext.forR4BCached()), + arguments(false, FhirContext.forR4BCached()), + arguments(true, FhirContext.forR5Cached()), + arguments(false, FhirContext.forR5Cached()) + ); + } + @ParameterizedTest + @MethodSource("crossPartitionParams") + void testCrossPartition(boolean theCrossPartitionSubscriptionEnabled, FhirContext theFhirContext) { + final IFhirVersion version = 
theFhirContext.getVersion(); + IBaseResource subscription = null; + if (version instanceof FhirDstu2){ + subscription = new ca.uhn.fhir.model.dstu2.resource.Subscription(); + } else if (version instanceof FhirDstu3){ + subscription = new org.hl7.fhir.dstu3.model.Subscription(); + } else if (version instanceof FhirR4){ + subscription = new Subscription(); + } else if (version instanceof FhirR4B){ + subscription = new org.hl7.fhir.r4b.model.Subscription(); + } else if (version instanceof FhirR5){ + subscription = new org.hl7.fhir.r5.model.Subscription(); + } + + final StorageSettings storageSettings = new StorageSettings(); + storageSettings.setCrossPartitionSubscriptionEnabled(theCrossPartitionSubscriptionEnabled); + final SubscriptionCanonicalizer subscriptionCanonicalizer = new SubscriptionCanonicalizer(theFhirContext, storageSettings); + final CanonicalSubscription canonicalSubscription = subscriptionCanonicalizer.canonicalize(subscription); + + assertEquals(theCrossPartitionSubscriptionEnabled, canonicalSubscription.getCrossPartitionEnabled()); + } + private org.hl7.fhir.r4b.model.Subscription buildR4BSubscription(String thePayloadContent) { org.hl7.fhir.r4b.model.Subscription subscription = new org.hl7.fhir.r4b.model.Subscription(); @@ -195,7 +248,7 @@ class SubscriptionCanonicalizerTest { // Example drawn from http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/Subscription-subscription-zulip.json.html // setup - SubscriptionCanonicalizer r4Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4Cached()); + SubscriptionCanonicalizer r4Canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4Cached(), new StorageSettings()); // execute Subscription subscription = SubscriptionTestDataHelper.buildR4TopicSubscriptionWithContent(thePayloadContent); From 6e5c6fa33c7aea8bae9fe8b646c93757f6b43940 Mon Sep 17 00:00:00 2001 From: Emre Dincturk <74370953+mrdnctrk@users.noreply.github.com> Date: Thu, 11 Apr 2024 15:50:04 -0400 Subject: [PATCH 08/26] 
disabled old migration step that fails on fresh mssql db (#5841) --- .../tasks/HapiFhirJpaMigrationTasks.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index e43682109b0..25094f71020 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -127,7 +127,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder version = forVersion(VersionEnum.V7_2_0); - // allow null codes in concept map targets + // allow null codes in concept map targets (see comment on "20190722.27" if you are going to change this) version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") .modifyColumn("20240327.1", "TARGET_CODE") .nullable() @@ -2489,10 +2489,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .modifyColumn("20190722.26", "SYSTEM_VERSION") .nullable() .withType(ColumnTypeEnum.STRING, 200); + + /* + DISABLED THIS STEP (20190722.27) ON PURPOSE BECAUSE IT STARTED CAUSING FAILURES ON MSSQL FOR A FRESH DB. + I left it here for historical purposes. + The reason for the failure is as follows. The TARGET_CODE column was originally 'not nullable' when it was + first introduced. And in 7_2_0, it is being changed to a nullable column (see 20240327.1 in init720()). + Starting with 7_2_0, on a fresh db, we create the table with nullable TARGET_CODE (as it is made nullable now). + Since we run all migration steps on fresh db, this step will try to convert the column which is created as nullable + to not nullable (which will then need to be converted back to nullable in 7_2_0 migration). 
+ Changing a nullable column to not nullable is not allowed in + MSSQL if there is an index on the column, which is the case here, as there is IDX_CNCPT_MP_GRP_ELM_TGT_CD + on this column. Since init720() has the right migration + step, where the column is set to nullable and has the right type and length, this statement is also + not necessary anymore even for not fresh dbs. + version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") .modifyColumn("20190722.27", "TARGET_CODE") .nonNullable() .withType(ColumnTypeEnum.STRING, 500); + */ version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") .modifyColumn("20190722.28", "VALUESET_URL") .nullable() From e3cc83a694c8a542ae53a91ea618a4d4df13180c Mon Sep 17 00:00:00 2001 From: JasonRoberts-smile <85363818+JasonRoberts-smile@users.noreply.github.com> Date: Thu, 11 Apr 2024 20:19:56 -0400 Subject: [PATCH 09/26] fix bug (#5843) --- .../java/ca/uhn/fhir/parser/BaseParser.java | 25 ++++++++++--------- .../changelog/7_2_0/5842-json-parser-npe.yaml | 5 ++++ .../ca/uhn/fhir/parser/JsonParserR4Test.java | 12 +++++++++ 3 files changed, 30 insertions(+), 12 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5842-json-parser-npe.yaml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java index a6534da7844..9dbea4a3d7b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java @@ -609,12 +609,14 @@ public abstract class BaseParser implements IParser { private boolean isStripVersionsFromReferences( CompositeChildElement theCompositeChildElement, IBaseResource theResource) { - Set autoVersionReferencesAtPathExtensions = - MetaUtil.getAutoVersionReferencesAtPath(theResource.getMeta(), myContext.getResourceType(theResource)); + if (theResource != null) { + Set autoVersionReferencesAtPathExtensions = 
MetaUtil.getAutoVersionReferencesAtPath( + theResource.getMeta(), myContext.getResourceType(theResource)); - if (!autoVersionReferencesAtPathExtensions.isEmpty() - && theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) { - return false; + if (!autoVersionReferencesAtPathExtensions.isEmpty() + && theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) { + return false; + } } Boolean stripVersionsFromReferences = myStripVersionsFromReferences; @@ -622,21 +624,20 @@ public abstract class BaseParser implements IParser { return stripVersionsFromReferences; } - if (myContext.getParserOptions().isStripVersionsFromReferences() == false) { + if (!myContext.getParserOptions().isStripVersionsFromReferences()) { return false; } Set dontStripVersionsFromReferencesAtPaths = myDontStripVersionsFromReferencesAtPaths; - if (dontStripVersionsFromReferencesAtPaths != null) { - if (dontStripVersionsFromReferencesAtPaths.isEmpty() == false - && theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) { - return false; - } + if (dontStripVersionsFromReferencesAtPaths != null + && !dontStripVersionsFromReferencesAtPaths.isEmpty() + && theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) { + return false; } dontStripVersionsFromReferencesAtPaths = myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths(); - return dontStripVersionsFromReferencesAtPaths.isEmpty() != false + return dontStripVersionsFromReferencesAtPaths.isEmpty() || !theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths); } diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5842-json-parser-npe.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5842-json-parser-npe.yaml new file mode 100644 index 00000000000..4f6f8570ff9 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5842-json-parser-npe.yaml 
@@ -0,0 +1,5 @@ +--- +type: fix +issue: 5842 +title: "Fixed a bug where an NPE was being thrown when trying to serialize a FHIR fragment (e.g., backboneElement +, compound datatype) to a string representation if the fragment contains a `Reference`." diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/parser/JsonParserR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/parser/JsonParserR4Test.java index 47b73e6d0cc..8964bd244f2 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/parser/JsonParserR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/parser/JsonParserR4Test.java @@ -1168,6 +1168,18 @@ public class JsonParserR4Test extends BaseTest { assertEquals(expected, actual); } + @Test + public void testEncodeToString_CompoundTypeWithReference() { + Identifier identifier = new Identifier(); + identifier.setSystem("http://system.org"); + identifier.setValue("123"); + Reference reference = new Reference("Organization/1"); + identifier.setAssigner(reference); + String expected = "{\"system\":\"http://system.org\",\"value\":\"123\",\"assigner\":{\"reference\":\"Organization/1\"}}"; + String actual = ourCtx.newJsonParser().encodeToString(identifier); + assertEquals(expected, actual); + } + @Test public void testEncodeToString_Resource() { Patient p = new Patient(); From b85a26f4a061fd85cf078b933906ad70a5540caf Mon Sep 17 00:00:00 2001 From: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> Date: Fri, 12 Apr 2024 17:13:39 -0600 Subject: [PATCH 10/26] Automated Migration Testing (HAPI-FHIR) (#5836) * Automated Migration Testing (HAPI-FHIR) - added and updated test migration scripts for V5_2_0, V5_3_0, V5_4_0, V5_5_0, V5_7_0, V6_0_0, V6_3_0, V6_6_0, V6_8_0, V7_0_0 --- .../releases/V5_2_0/data/H2_EMBEDDED.sql | 4 +- .../releases/V5_2_0/data/MSSQL_2012.sql | 4 +- .../releases/V5_2_0/data/ORACLE_12C.sql | 4 +- .../releases/V5_2_0/data/POSTGRES_9_4.sql | 4 +- .../releases/V5_3_0/data/H2_EMBEDDED.sql | 40 +++- 
.../releases/V5_3_0/data/MSSQL_2012.sql | 40 +++- .../releases/V5_3_0/data/ORACLE_12C.sql | 34 ++- .../releases/V5_3_0/data/POSTGRES_9_4.sql | 34 ++- .../releases/V5_4_0/data/H2_EMBEDDED.sql | 22 ++ .../releases/V5_4_0/data/MSSQL_2012.sql | 22 ++ .../releases/V5_4_0/data/ORACLE_12C.sql | 22 ++ .../releases/V5_4_0/data/POSTGRES_9_4.sql | 23 +++ .../releases/V5_5_0/data/H2_EMBEDDED.sql | 111 ++++++++++ .../releases/V5_5_0/data/MSSQL_2012.sql | 112 ++++++++++ .../releases/V5_5_0/data/ORACLE_12C.sql | 111 ++++++++++ .../releases/V5_5_0/data/POSTGRES_9_4.sql | 111 ++++++++++ .../releases/V5_7_0/data/H2_EMBEDDED.sql | 69 +++++++ .../releases/V5_7_0/data/MSSQL_2012.sql | 69 +++++++ .../releases/V5_7_0/data/ORACLE_12C.sql | 69 +++++++ .../releases/V5_7_0/data/POSTGRES_9_4.sql | 69 +++++++ .../releases/V6_0_0/data/H2_EMBEDDED.sql | 14 +- .../releases/V6_0_0/data/MSSQL_2012.sql | 14 +- .../releases/V6_0_0/data/ORACLE_12C.sql | 14 +- .../releases/V6_0_0/data/POSTGRES_9_4.sql | 14 +- .../releases/V6_1_0/data/H2_EMBEDDED.sql | 43 ++++ .../releases/V6_1_0/data/MSSQL_2012.sql | 43 ++++ .../releases/V6_1_0/data/ORACLE_12C.sql | 43 ++++ .../releases/V6_1_0/data/POSTGRES_9_4.sql | 43 ++++ .../releases/V6_2_0/data/H2_EMBEDDED.sql | 81 ++++++++ .../releases/V6_2_0/data/MSSQL_2012.sql | 81 ++++++++ .../releases/V6_2_0/data/ORACLE_12C.sql | 81 ++++++++ .../releases/V6_2_0/data/POSTGRES_9_4.sql | 81 ++++++++ .../releases/V6_3_0/data/H2_EMBEDDED.sql | 45 ++++ .../releases/V6_3_0/data/MSSQL_2012.sql | 45 ++++ .../releases/V6_3_0/data/ORACLE_12C.sql | 45 ++++ .../releases/V6_3_0/data/POSTGRES_9_4.sql | 45 ++++ .../releases/V6_6_0/data/H2_EMBEDDED.sql | 52 ++++- .../releases/V6_6_0/data/MSSQL_2012.sql | 52 ++++- .../releases/V6_6_0/data/ORACLE_12C.sql | 52 ++++- .../releases/V6_6_0/data/POSTGRES_9_4.sql | 52 ++++- .../releases/V6_8_0/data/H2_EMBEDDED.sql | 195 ++++++++++++++++++ .../releases/V6_8_0/data/MSSQL_2012.sql | 195 ++++++++++++++++++ .../releases/V6_8_0/data/ORACLE_12C.sql | 195 
++++++++++++++++++ .../releases/V6_8_0/data/POSTGRES_9_4.sql | 195 ++++++++++++++++++ .../releases/V7_0_0/data/H2_EMBEDDED.sql | 57 +++++ .../releases/V7_0_0/data/MSSQL_2012.sql | 57 +++++ .../releases/V7_0_0/data/ORACLE_12C.sql | 57 +++++ .../releases/V7_0_0/data/POSTGRES_9_4.sql | 57 +++++ .../jpa/embedded/HapiSchemaMigrationTest.java | 6 + 49 files changed, 2884 insertions(+), 44 deletions(-) create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/H2_EMBEDDED.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/POSTGRES_9_4.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/H2_EMBEDDED.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/POSTGRES_9_4.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/H2_EMBEDDED.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/POSTGRES_9_4.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/H2_EMBEDDED.sql create mode 100644 
hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/POSTGRES_9_4.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/H2_EMBEDDED.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/POSTGRES_9_4.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/H2_EMBEDDED.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/MSSQL_2012.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/ORACLE_12C.sql create mode 100644 hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/POSTGRES_9_4.sql diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/H2_EMBEDDED.sql index 2647d7931e3..5f167a74a07 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/H2_EMBEDDED.sql @@ -23,10 +23,10 @@ INSERT INTO MPI_LINK ( 2, 1, 1906, - NULL, + 1.0, 1905, '2023-04-05 15:16:26.43', - NULL, + 61, '1', 1906, 1 diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/MSSQL_2012.sql index 2647d7931e3..5f167a74a07 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/MSSQL_2012.sql @@ -23,10 +23,10 @@ INSERT INTO MPI_LINK ( 2, 1, 1906, - NULL, + 1.0, 1905, '2023-04-05 15:16:26.43', - NULL, + 61, '1', 1906, 1 diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/ORACLE_12C.sql index e9a4cc2f51b..2564a214f84 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/ORACLE_12C.sql @@ -23,10 +23,10 @@ INSERT INTO MPI_LINK ( 2, 1, 1906, - NULL, + 1.0, 1905, SYSDATE, - NULL, + 61, '1', 1906, 1 diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/POSTGRES_9_4.sql index 2304423b0d6..ff7c3453653 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_2_0/data/POSTGRES_9_4.sql @@ -23,10 +23,10 @@ INSERT INTO MPI_LINK ( 2, true, 1906, - NULL, + 1.0, 1905, '2023-04-05 15:16:26.43', - NULL, + 61, '1', 1906, 1 diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/H2_EMBEDDED.sql index 
d60b0f272a7..8e056578629 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/H2_EMBEDDED.sql @@ -46,23 +46,55 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( RES_TYPE, SP_UPDATED, SP_MISSING, - SP_NAME, SP_ID, + SP_NAME, + SP_ID, SP_SYSTEM, SP_UNITS, HASH_IDENTITY_AND_UNITS, HASH_IDENTITY_SYS_UNITS, HASH_IDENTITY, - SP_VALUE + SP_VALUE, + PARTITION_DATE, + PARTITION_ID ) VALUES ( 1702, 'Observation', '2023-04-05 15:16:26.43', - 0, 'value-quantity', + 0, + 'value-quantity', 2, 'https://unitsofmeasure.org', 'g', -864931808150710347, 6382255012744790145, -1901136387361512731, - 0.012 + 0.012, + '2024-04-05', + 1 +); + +INSERT INTO HFJ_RES_LINK ( + PID, + PARTITION_DATE, + PARTITION_ID, + SRC_PATH, + SRC_RESOURCE_ID, + SOURCE_RESOURCE_TYPE, + TARGET_RESOURCE_ID, + TARGET_RESOURCE_TYPE, + TARGET_RESOURCE_URL, + TARGET_RESOURCE_VERSION, + SP_UPDATED +) VALUES ( + 701, + '2024-04-05', + 1, + 'Observation.subject.where(resolve() is Patient)', + 1702, + 'Observation', + 1906, + 'Patient', + 'http://localhost:8000/Patient/123', + 1, + '2024-03-01 18:01:12.921' ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/MSSQL_2012.sql index d60b0f272a7..8e056578629 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/MSSQL_2012.sql @@ -46,23 +46,55 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( RES_TYPE, SP_UPDATED, SP_MISSING, - SP_NAME, SP_ID, + SP_NAME, + SP_ID, SP_SYSTEM, SP_UNITS, HASH_IDENTITY_AND_UNITS, HASH_IDENTITY_SYS_UNITS, HASH_IDENTITY, - SP_VALUE + SP_VALUE, + PARTITION_DATE, + PARTITION_ID ) VALUES ( 1702, 'Observation', '2023-04-05 
15:16:26.43', - 0, 'value-quantity', + 0, + 'value-quantity', 2, 'https://unitsofmeasure.org', 'g', -864931808150710347, 6382255012744790145, -1901136387361512731, - 0.012 + 0.012, + '2024-04-05', + 1 +); + +INSERT INTO HFJ_RES_LINK ( + PID, + PARTITION_DATE, + PARTITION_ID, + SRC_PATH, + SRC_RESOURCE_ID, + SOURCE_RESOURCE_TYPE, + TARGET_RESOURCE_ID, + TARGET_RESOURCE_TYPE, + TARGET_RESOURCE_URL, + TARGET_RESOURCE_VERSION, + SP_UPDATED +) VALUES ( + 701, + '2024-04-05', + 1, + 'Observation.subject.where(resolve() is Patient)', + 1702, + 'Observation', + 1906, + 'Patient', + 'http://localhost:8000/Patient/123', + 1, + '2024-03-01 18:01:12.921' ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/ORACLE_12C.sql index da22f69c522..12669bab472 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/ORACLE_12C.sql @@ -52,7 +52,9 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( HASH_IDENTITY_AND_UNITS, HASH_IDENTITY_SYS_UNITS, HASH_IDENTITY, - SP_VALUE + SP_VALUE, + PARTITION_DATE, + PARTITION_ID ) VALUES ( 1702, 'Observation', @@ -65,5 +67,33 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( -864931808150710347, 6382255012744790145, -1901136387361512731, - 0.012 + 0.012, + SYSDATE, + 1 +); + +INSERT INTO HFJ_RES_LINK ( + PID, + PARTITION_DATE, + PARTITION_ID, + SRC_PATH, + SRC_RESOURCE_ID, + SOURCE_RESOURCE_TYPE, + TARGET_RESOURCE_ID, + TARGET_RESOURCE_TYPE, + TARGET_RESOURCE_URL, + TARGET_RESOURCE_VERSION, + SP_UPDATED +) VALUES ( + 701, + SYSDATE, + 1, + 'Observation.subject.where(resolve() is Patient)', + 1702, + 'Observation', + 1906, + 'Patient', + 'http://localhost:8000/Patient/123', + 1, + SYSDATE ); diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/POSTGRES_9_4.sql index 672a30d684b..27851a99ed0 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_3_0/data/POSTGRES_9_4.sql @@ -52,7 +52,9 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( HASH_IDENTITY_AND_UNITS, HASH_IDENTITY_SYS_UNITS, HASH_IDENTITY, - SP_VALUE + SP_VALUE, + PARTITION_DATE, + PARTITION_ID ) VALUES ( 1702, 'Observation', @@ -65,5 +67,33 @@ INSERT INTO HFJ_SPIDX_QUANTITY_NRML ( -864931808150710347, 6382255012744790145, -1901136387361512731, - 0.012 + 0.012, + '2024-04-05', + 1 +); + +INSERT INTO HFJ_RES_LINK ( + PID, + PARTITION_DATE, + PARTITION_ID, + SRC_PATH, + SRC_RESOURCE_ID, + SOURCE_RESOURCE_TYPE, + TARGET_RESOURCE_ID, + TARGET_RESOURCE_TYPE, + TARGET_RESOURCE_URL, + TARGET_RESOURCE_VERSION, + SP_UPDATED +) VALUES ( + 701, + '2024-04-05', + 1, + 'Observation.subject.where(resolve() is Patient)', + 1702, + 'Observation', + 1906, + 'Patient', + 'http://localhost:8000/Patient/123', + 1, + '2024-03-01 18:01:12.921' ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/H2_EMBEDDED.sql index ffd2004b26b..f088c8b7918 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/H2_EMBEDDED.sql @@ -35,3 +35,25 @@ INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( 0, '' ); + +INSERT INTO TRM_VALUESET_CONCEPT ( + PID, + CODEVAL, + DISPLAY, + VALUESET_ORDER, + SYSTEM_URL, + VALUESET_PID, + INDEX_STATUS, + SOURCE_DIRECT_PARENT_PIDS, + SOURCE_PID +) VALUES 
( + 177, + 'LA13892-8', + 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', + 1, + 'HTTP://LOINC.ORG', + 59, + 1, + 10820244, + 4824 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/MSSQL_2012.sql index ffd2004b26b..f088c8b7918 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/MSSQL_2012.sql @@ -35,3 +35,25 @@ INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( 0, '' ); + +INSERT INTO TRM_VALUESET_CONCEPT ( + PID, + CODEVAL, + DISPLAY, + VALUESET_ORDER, + SYSTEM_URL, + VALUESET_PID, + INDEX_STATUS, + SOURCE_DIRECT_PARENT_PIDS, + SOURCE_PID +) VALUES ( + 177, + 'LA13892-8', + 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', + 1, + 'HTTP://LOINC.ORG', + 59, + 1, + 10820244, + 4824 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/ORACLE_12C.sql index df0ae2c8c1e..ccc0c162d8c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/ORACLE_12C.sql @@ -35,3 +35,25 @@ INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( 0, '' ); + +INSERT INTO TRM_VALUESET_CONCEPT ( + PID, + CODEVAL, + DISPLAY, + VALUESET_ORDER, + SYSTEM_URL, + VALUESET_PID, + INDEX_STATUS, + SOURCE_DIRECT_PARENT_PIDS, + SOURCE_PID +) VALUES ( + 177, + 'LA13892-8', + 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', + 1, + 'HTTP://LOINC.ORG', + 59, + 1, + '10820244', + 4824 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql index ffd2004b26b..7b976a4c826 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql @@ -35,3 +35,26 @@ INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( 0, '' ); + +INSERT INTO TRM_VALUESET_CONCEPT ( + PID, + CODEVAL, + DISPLAY, + VALUESET_ORDER, + SYSTEM_URL, + VALUESET_PID, + INDEX_STATUS, + SOURCE_DIRECT_PARENT_PIDS, + SOURCE_PID +) VALUES ( + 177, + 'LA13892-8', + 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', + 1, + 'HTTP://LOINC.ORG', + 59, + 1, + 10820244, + 4824 +); + diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/H2_EMBEDDED.sql index 6d1daba0099..c640127e1b9 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/H2_EMBEDDED.sql @@ -15,6 +15,7 @@ INSERT INTO HFJ_RESOURCE ( SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER @@ -36,20 +37,130 @@ INSERT INTO HFJ_RESOURCE ( 0, 0, 1, + 0, 1, 'Observation', 1 ); +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1654, + 'R4', + 0, + '2023-06-15 09:58:42.95', + '2023-06-15 09:58:42.95', + 0, + 
'712b39864f0650e6b9ffd7b4ffa7b2de5db4ceecbcbd76b1f1b8e152ed17fcd0', + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + 1, + 0, + 1, + 'VALUESET', + 1 +); INSERT INTO HFJ_IDX_CMB_TOK_NU ( PID, + PARTITION_DATE, + PARTITION_ID, HASH_COMPLETE, IDX_STRING, RES_ID ) VALUES ( 10, + '2024-04-05', + 1, '5570851350247697202', 'Patient?birthdate=1974-12-25&family=WINDSOR&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale', 1656 ); + +INSERT INTO HFJ_BLK_IMPORT_JOB ( + PID, + JOB_ID, + JOB_STATUS, + STATUS_TIME, + STATUS_MESSAGE, + JOB_DESC, + OPTLOCK, + FILE_COUNT, + ROW_PROCESSING_MODE, + BATCH_SIZE +) VALUES ( + 61, + '0fe3744a-93f3-4bb8-a59e-15643566e513', + 'READY', + '2024-04-04 16:09:14.488', + '', + 'ETL Import Job: (unnamed)', + 0, + 1, + 'FHIR_TRANSACTION', + 10 +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 65, + 61, + 72995, + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO TRM_VALUESET ( + PID, + EXPANSION_STATUS, + EXPANDED_AT, + VSNAME, + RES_ID, + TOTAL_CONCEPT_DESIGNATIONS, + TOTAL_CONCEPTS, + URL, + VER +) VALUES ( + 60, + 'EXPANDED', + '2024-04-04 16:09:14.488', + 'v2.0926', + 1654, + 0, + 3, + 'http://terminology.hl7.org/ValueSet/v2-0926', + '2.9' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/MSSQL_2012.sql index 33ef46e2f5f..3017e9cc0ba 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/MSSQL_2012.sql @@ -15,6 +15,7 @@ INSERT INTO HFJ_RESOURCE ( SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER @@ -36,20 +37,131 @@ INSERT INTO HFJ_RESOURCE ( 'false', 'false', 'true', + 'false', 'true', 
'Observation', 1 ); +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +)VALUES ( + 1654, + 'R4', + 'false', + '2023-06-15 09:58:42.95', + '2023-06-15 09:58:42.95', + 'false', + '712b39864f0650e6b9ffd7b4ffa7b2de5db4ceecbcbd76b1f1b8e152ed17fcd0', + 1, + 'false', + 'false', + 'false', + 'false', + 'false', + 'false', + 'true', + 'true', + 'false', + 'true', + 'VALUESET', + 1 +); + INSERT INTO HFJ_IDX_CMB_TOK_NU ( PID, + PARTITION_DATE, + PARTITION_ID, HASH_COMPLETE, IDX_STRING, RES_ID ) VALUES ( 10, + '2024-04-05', + 1, '5570851350247697202', 'Patient?birthdate=1974-12-25&family=WINDSOR&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale', 1653 ); + +INSERT INTO HFJ_BLK_IMPORT_JOB ( + PID, + JOB_ID, + JOB_STATUS, + STATUS_TIME, + STATUS_MESSAGE, + JOB_DESC, + OPTLOCK, + FILE_COUNT, + ROW_PROCESSING_MODE, + BATCH_SIZE +) VALUES ( + 61, + '0fe3744a-93f3-4bb8-a59e-15643566e513', + 'READY', + '2024-04-04 16:09:14.488', + '', + 'ETL Import Job: (unnamed)', + 0, + 1, + 'FHIR_TRANSACTION', + 10 +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 65, + 61, + 72995, + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO TRM_VALUESET ( + PID, + EXPANSION_STATUS, + EXPANDED_AT, + VSNAME, + RES_ID, + TOTAL_CONCEPT_DESIGNATIONS, + TOTAL_CONCEPTS, + URL, + VER +) VALUES ( + 60, + 'EXPANDED', + '2024-04-04 16:09:14.488', + 'v2.0926', + 1654, + 0, + 3, + 'http://terminology.hl7.org/ValueSet/v2-0926', + '2.9' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/ORACLE_12C.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/ORACLE_12C.sql index bef94b97cc3..21310004c02 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/ORACLE_12C.sql @@ -15,6 +15,7 @@ INSERT INTO HFJ_RESOURCE ( SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER @@ -36,20 +37,130 @@ INSERT INTO HFJ_RESOURCE ( 0, 0, 1, + 0, 1, 'Observation', 1 ); +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1654, + 'R4', + 0, + SYSDATE, + SYSDATE, + 0, + '712b39864f0650e6b9ffd7b4ffa7b2de5db4ceecbcbd76b1f1b8e152ed17fcd0', + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + 1, + 0, + 1, + 'VALUESET', + 1 +); INSERT INTO HFJ_IDX_CMB_TOK_NU ( PID, + PARTITION_DATE, + PARTITION_ID, HASH_COMPLETE, IDX_STRING, RES_ID ) VALUES ( 10, + SYSDATE, + 1, '5570851350247697202', 'Patient?birthdate=1974-12-25&family=WINDSOR&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale', 1653 ); + +INSERT INTO HFJ_BLK_IMPORT_JOB ( + PID, + JOB_ID, + JOB_STATUS, + STATUS_TIME, + STATUS_MESSAGE, + JOB_DESC, + OPTLOCK, + FILE_COUNT, + ROW_PROCESSING_MODE, + BATCH_SIZE +) VALUES ( + 61, + '0fe3744a-93f3-4bb8-a59e-15643566e513', + 'READY', + SYSDATE, + '', + 'ETL Import Job: (unnamed)', + 0, + 1, + 'FHIR_TRANSACTION', + 10 +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 65, + 61, + HEXTORAW('453d7a34'), + 'Rows 0 - 10', + 0, + '' 
+); + +INSERT INTO TRM_VALUESET ( + PID, + EXPANSION_STATUS, + EXPANDED_AT, + VSNAME, + RES_ID, + TOTAL_CONCEPT_DESIGNATIONS, + TOTAL_CONCEPTS, + URL, + VER +) VALUES ( + 60, + 'EXPANDED', + SYSDATE, + 'v2.0926', + 1654, + 0, + 3, + 'http://terminology.hl7.org/ValueSet/v2-0926', + '2.9' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/POSTGRES_9_4.sql index d55f18c2aa5..41fc1657397 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_5_0/data/POSTGRES_9_4.sql @@ -15,6 +15,7 @@ INSERT INTO HFJ_RESOURCE ( SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER @@ -36,20 +37,130 @@ INSERT INTO HFJ_RESOURCE ( FALSE, FALSE, TRUE, + FALSE, TRUE, 'Observation', 1 ); +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1654, + 'R4', + FALSE, + '2023-06-15 09:58:42.95', + '2023-06-15 09:58:42.95', + FALSE, + '712b39864f0650e6b9ffd7b4ffa7b2de5db4ceecbcbd76b1f1b8e152ed17fcd0', + 1, + FALSE, + FALSE, + FALSE, + FALSE, + FALSE, + FALSE, + TRUE, + TRUE, + FALSE, + TRUE, + 'VALUESET', + 1 +); INSERT INTO HFJ_IDX_CMB_TOK_NU ( PID, + PARTITION_DATE, + PARTITION_ID, HASH_COMPLETE, IDX_STRING, RES_ID ) VALUES ( 10, + '2024-04-05', + 1, '5570851350247697202', 'Patient?birthdate=1974-12-25&family=WINDSOR&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale', 1653 ); + +INSERT 
INTO HFJ_BLK_IMPORT_JOB ( + PID, + JOB_ID, + JOB_STATUS, + STATUS_TIME, + STATUS_MESSAGE, + JOB_DESC, + OPTLOCK, + FILE_COUNT, + ROW_PROCESSING_MODE, + BATCH_SIZE +) VALUES ( + 61, + '0fe3744a-93f3-4bb8-a59e-15643566e513', + 'READY', + '2024-04-04 16:09:14.488', + '', + 'ETL Import Job: (unnamed)', + 0, + 1, + 'FHIR_TRANSACTION', + 10 +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 65, + 61, + 72995, + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO TRM_VALUESET ( + PID, + EXPANSION_STATUS, + EXPANDED_AT, + VSNAME, + RES_ID, + TOTAL_CONCEPT_DESIGNATIONS, + TOTAL_CONCEPTS, + URL, + VER +) VALUES ( + 60, + 'EXPANDED', + '2024-04-04 16:09:14.488', + 'v2.0926', + 1654, + 0, + 3, + 'http://terminology.hl7.org/ValueSet/v2-0926', + '2.9' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/H2_EMBEDDED.sql new file mode 100644 index 00000000000..17c78e1425e --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/H2_EMBEDDED.sql @@ -0,0 +1,69 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC +) VALUES ( + 1656, + '2024-04-05', + 1, + '2024-04-05', + 'R4', + 0, + '2023-06-01 15:54:05.296', + '2023-06-01 15:54:05.296', + 'JSON', + 1413284, + 1656, + 'Observation', + 1, + '{"resourceType":"Observation","code":{"coding":[{"system":"http://loinc.org","code":"8302-2","display":"Body Height"}]},"subject":{"reference":"Patient/4454"},"effectiveDateTime":"2022-02-09T00:00:00Z","valueQuantity":{"value":0.12,"unit":"cm","system":"http://unitsofmeasure.org","code":"cm"}}' +); + +INSERT INTO MPI_LINK ( + PID, + CREATED, + EID_MATCH, + 
TARGET_TYPE, + LINK_SOURCE, + MATCH_RESULT, + NEW_PERSON, + PERSON_PID, + SCORE, + TARGET_PID, + UPDATED, + VECTOR, + VERSION, + GOLDEN_RESOURCE_PID, + RULE_COUNT, + PARTITION_DATE, + PARTITION_ID +) VALUES ( + 2, + '2024-04-05 15:16:26.43', + 1, + 'Observation', + 0, + 2, + 1, + 1906, + 1.0, + 1702, + '2023-04-05 15:16:26.43', + 61, + '1', + 1656, + 1, + '2024-04-05', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..1aaec74ad51 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/MSSQL_2012.sql @@ -0,0 +1,69 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC +) VALUES ( + 1656, + '2024-04-05', + 1, + '2024-04-05 15:54:05.296', + 'R4', + 'false', + '2023-05-01 15:54:05.296', + '2023-05-01 15:54:05.296', + 'JSON', + 1413284, + 1653, + 'Observation', + 1, + '{"resourceType":"Observation","code":{"coding":[{"system":"http://loinc.org","code":"8302-2","display":"Body Height"}]},"subject":{"reference":"Patient/4454"},"effectiveDateTime":"2022-02-09T00:00:00Z","valueQuantity":{"value":0.12,"unit":"cm","system":"http://unitsofmeasure.org","code":"cm"}}' +); + +INSERT INTO MPI_LINK ( + PID, + CREATED, + EID_MATCH, + TARGET_TYPE, + LINK_SOURCE, + MATCH_RESULT, + NEW_PERSON, + PERSON_PID, + SCORE, + TARGET_PID, + UPDATED, + VECTOR, + VERSION, + GOLDEN_RESOURCE_PID, + RULE_COUNT, + PARTITION_DATE, + PARTITION_ID +) VALUES ( + 2, + '2023-04-05 15:16:26.43', + 'true', + 'Observation', + 0, + 2, + 'true', + 1906, + 1.0, + 1702, + '2023-04-05 15:16:26.43', + 61, + '1', + 1653, + 1, + '2024-04-05', + 1 +); diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/ORACLE_12C.sql new file mode 100644 index 00000000000..d7b4407a023 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/ORACLE_12C.sql @@ -0,0 +1,69 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC +) VALUES ( + 1653, + SYSDATE, + 1, + SYSDATE, + 'R4', + 0, + SYSDATE, + SYSDATE, + 'JSON', + '1413284', + 1653, + 'Observation', + 1, + '{"resourceType":"Observation","code":{"coding":[{"system":"http://loinc.org","code":"8302-2","display":"Body Height"}]},"subject":{"reference":"Patient/4454"},"effectiveDateTime":"2022-02-09T00:00:00Z","valueQuantity":{"value":0.12,"unit":"cm","system":"http://unitsofmeasure.org","code":"cm"}}' +); + +INSERT INTO MPI_LINK ( + PID, + CREATED, + EID_MATCH, + TARGET_TYPE, + LINK_SOURCE, + MATCH_RESULT, + NEW_PERSON, + PERSON_PID, + SCORE, + TARGET_PID, + UPDATED, + VECTOR, + VERSION, + GOLDEN_RESOURCE_PID, + RULE_COUNT, + PARTITION_DATE, + PARTITION_ID +) VALUES ( + 2, + SYSDATE, + 1, + 'Observation', + 0, + 2, + 1, + 1906, + 1.0, + 1702, + SYSDATE, + 61, + '1', + 1653, + 1, + SYSDATE, + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..404af60dd1b --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_7_0/data/POSTGRES_9_4.sql @@ -0,0 +1,69 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + 
RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC +) VALUES ( + 1653, + '2024-04-05', + 1, + '2024-04-05 15:54:05.296', + 'R4', + FALSE, + '2023-05-01 15:54:05.296', + '2023-05-01 15:54:05.296', + 'JSON', + 1413284, + 1653, + 'Observation', + 1, + '{"resourceType":"Observation","code":{"coding":[{"system":"http://loinc.org","code":"8302-2","display":"Body Height"}]},"subject":{"reference":"Patient/4454"},"effectiveDateTime":"2022-02-09T00:00:00Z","valueQuantity":{"value":0.12,"unit":"cm","system":"http://unitsofmeasure.org","code":"cm"}}' +); + +INSERT INTO MPI_LINK ( + PID, + CREATED, + EID_MATCH, + TARGET_TYPE, + LINK_SOURCE, + MATCH_RESULT, + NEW_PERSON, + PERSON_PID, + SCORE, + TARGET_PID, + UPDATED, + VECTOR, + VERSION, + GOLDEN_RESOURCE_PID, + RULE_COUNT, + PARTITION_DATE, + PARTITION_ID +) VALUES ( + 2, + '2023-04-05 15:16:26.43', + TRUE, + 'Observation', + 0, + 2, + TRUE, + 1906, + 1.0, + 1702, + '2023-04-05 15:16:26.43', + 61, + '1', + 1653, + 1, + '2024-04-05', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/H2_EMBEDDED.sql index 1768c4ef7da..64d50ee7806 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/H2_EMBEDDED.sql @@ -14,6 +14,9 @@ INSERT INTO BT2_JOB_INSTANCE ( PROGRESS_PCT, START_TIME, STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, WORK_CHUNKS_PURGED ) VALUES ( '00161699-bcfe-428e-9ca2-caceb9645f8a', @@ -31,6 +34,9 @@ INSERT INTO BT2_JOB_INSTANCE ( 1, '2023-07-06 14:24:10.875', 'COMPLETED', + 200, + 'Error message', + 83006, 1 ); @@ -46,7 +52,9 @@ INSERT INTO BT2_WORK_CHUNK ( SEQ, START_TIME, STAT, - TGT_STEP_ID + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA ) VALUES ( 
'01d26875-8d1a-4e37-b554-62a3219f009b', '2023-07-06 15:20:20.797', @@ -59,5 +67,7 @@ INSERT INTO BT2_WORK_CHUNK ( 0, '2023-07-06 15:21:11.14', 'COMPLETED', - 'ReadInResourcesFromFileStep' + 'ReadInResourcesFromFileStep', + 'Error message', + 72995 ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/MSSQL_2012.sql index 1768c4ef7da..64d50ee7806 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/MSSQL_2012.sql @@ -14,6 +14,9 @@ INSERT INTO BT2_JOB_INSTANCE ( PROGRESS_PCT, START_TIME, STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, WORK_CHUNKS_PURGED ) VALUES ( '00161699-bcfe-428e-9ca2-caceb9645f8a', @@ -31,6 +34,9 @@ INSERT INTO BT2_JOB_INSTANCE ( 1, '2023-07-06 14:24:10.875', 'COMPLETED', + 200, + 'Error message', + 83006, 1 ); @@ -46,7 +52,9 @@ INSERT INTO BT2_WORK_CHUNK ( SEQ, START_TIME, STAT, - TGT_STEP_ID + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA ) VALUES ( '01d26875-8d1a-4e37-b554-62a3219f009b', '2023-07-06 15:20:20.797', @@ -59,5 +67,7 @@ INSERT INTO BT2_WORK_CHUNK ( 0, '2023-07-06 15:21:11.14', 'COMPLETED', - 'ReadInResourcesFromFileStep' + 'ReadInResourcesFromFileStep', + 'Error message', + 72995 ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/ORACLE_12C.sql index d664c49edb5..eb70da60cfd 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/ORACLE_12C.sql @@ -14,6 +14,9 @@ INSERT INTO BT2_JOB_INSTANCE ( PROGRESS_PCT, START_TIME, STAT, + 
TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, WORK_CHUNKS_PURGED ) VALUES ( '00161699-bcfe-428e-9ca2-caceb9645f8a', @@ -31,6 +34,9 @@ INSERT INTO BT2_JOB_INSTANCE ( 1, SYSDATE, 'COMPLETED', + 200, + 'Error message', + HEXTORAW('8B9D5255'), 1 ); @@ -46,7 +52,9 @@ INSERT INTO BT2_WORK_CHUNK ( SEQ, START_TIME, STAT, - TGT_STEP_ID + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA ) VALUES ( '01d26875-8d1a-4e37-b554-62a3219f009b', SYSDATE, @@ -59,5 +67,7 @@ INSERT INTO BT2_WORK_CHUNK ( 0, SYSDATE, 'COMPLETED', - 'ReadInResourcesFromFileStep' + 'ReadInResourcesFromFileStep', + 'Error message', + HEXTORAW('453d7a34') ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/POSTGRES_9_4.sql index cc51eddda1f..e4bd609d03e 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_0_0/data/POSTGRES_9_4.sql @@ -14,6 +14,9 @@ INSERT INTO BT2_JOB_INSTANCE ( PROGRESS_PCT, START_TIME, STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, WORK_CHUNKS_PURGED ) VALUES ( '00161699-bcfe-428e-9ca2-caceb9645f8a', @@ -31,6 +34,9 @@ INSERT INTO BT2_JOB_INSTANCE ( 1, '2023-07-06 14:24:10.875', 'COMPLETED', + 200, + 'Error message', + 83006, true ); @@ -46,7 +52,9 @@ INSERT INTO BT2_WORK_CHUNK ( SEQ, START_TIME, STAT, - TGT_STEP_ID + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA ) VALUES ( '01d26875-8d1a-4e37-b554-62a3219f009b', '2023-07-06 15:20:20.797', @@ -59,5 +67,7 @@ INSERT INTO BT2_WORK_CHUNK ( 0, '2023-07-06 15:21:11.14', 'COMPLETED', - 'ReadInResourcesFromFileStep' + 'ReadInResourcesFromFileStep', + 'Error message', + 72995 ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/H2_EMBEDDED.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/H2_EMBEDDED.sql new file mode 100644 index 00000000000..4a16c2e8564 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/H2_EMBEDDED.sql @@ -0,0 +1,43 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT +) VALUES ( + 'a59cb9c2-f699-44c7-bfee-93f1e6f68038', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"d184be05-a634-40c3-a3e6-ab811329308b","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + 1, + 72995 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..4a16c2e8564 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/MSSQL_2012.sql @@ -0,0 +1,43 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT +) VALUES ( + 'a59cb9c2-f699-44c7-bfee-93f1e6f68038', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + 
'{"jobId":"d184be05-a634-40c3-a3e6-ab811329308b","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + 1, + 72995 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/ORACLE_12C.sql new file mode 100644 index 00000000000..75fe3dd5479 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/ORACLE_12C.sql @@ -0,0 +1,43 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT +) VALUES ( + 'a59cb9c2-f699-44c7-bfee-93f1e6f68038', + 0, + 0, + 0, + SYSDATE, + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + SYSDATE, + 0, + '0ms', + '{"jobId":"d184be05-a634-40c3-a3e6-ab811329308b","batchSize":100}', + 1, + SYSDATE, + 'COMPLETED', + 200, + 'Error message', + HEXTORAW('8B9D5255'), + 1, + HEXTORAW('453d7a34') +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..793109a7344 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_1_0/data/POSTGRES_9_4.sql @@ -0,0 +1,43 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + 
WORK_CHUNKS_PURGED, + REPORT +) VALUES ( + 'a59cb9c2-f699-44c7-bfee-93f1e6f68038', + false, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"d184be05-a634-40c3-a3e6-ab811329308b","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + true, + 72995 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/H2_EMBEDDED.sql new file mode 100644 index 00000000000..20b881e66a0 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/H2_EMBEDDED.sql @@ -0,0 +1,81 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT, + FAST_TRACKING, + UPDATE_TIME +) VALUES ( + 'cf969d58-44a5-4493-89c1-437e637532eb', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + 1, + 72995, + 0, + '2023-08-06 14:24:10.875' +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + UPDATE_TIME +) VALUES ( + '3a8d1bce-3a49-4f3d-98ac-f813b053772f', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + 'cf969d58-44a5-4493-89c1-437e637532eb', + 
'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '2023-07-06 15:21:11.14' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..0e70f16393d --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/MSSQL_2012.sql @@ -0,0 +1,81 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT, + FAST_TRACKING, + UPDATE_TIME +) VALUES ( + 'cf969d58-44a5-4493-89c1-437e637532eb', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + 1, + 72995, + 'false', + '2023-07-06 15:21:11.14' +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + UPDATE_TIME +) VALUES ( + '3a8d1bce-3a49-4f3d-98ac-f813b053772f', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + 'cf969d58-44a5-4493-89c1-437e637532eb', + 'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '2023-07-06 15:21:11.14' +); diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/ORACLE_12C.sql new file mode 100644 index 00000000000..96cb72a546f --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/ORACLE_12C.sql @@ -0,0 +1,81 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT, + FAST_TRACKING, + UPDATE_TIME +) VALUES ( + 'cf969d58-44a5-4493-89c1-437e637532eb', + 0, + 0, + 0, + SYSDATE, + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + SYSDATE, + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + SYSDATE, + 'COMPLETED', + 200, + 'Error message', + HEXTORAW('8B9D5255'), + 1, + HEXTORAW('453d7a34'), + 0, + SYSDATE +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + UPDATE_TIME +) VALUES ( + '3a8d1bce-3a49-4f3d-98ac-f813b053772f', + SYSDATE, + SYSDATE, + 0, + 'cf969d58-44a5-4493-89c1-437e637532eb', + 'bulkImportJob', + 1, + 0, + 0, + SYSDATE, + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + HEXTORAW('453d7a34'), + SYSDATE +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..56b6c515beb --- /dev/null +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_2_0/data/POSTGRES_9_4.sql @@ -0,0 +1,81 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + WORK_CHUNKS_PURGED, + REPORT, + FAST_TRACKING, + UPDATE_TIME +) VALUES ( + 'cf969d58-44a5-4493-89c1-437e637532eb', + false, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + true, + 72995, + false, + '2023-08-06 14:24:10.875' +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + UPDATE_TIME +) VALUES ( + '3a8d1bce-3a49-4f3d-98ac-f813b053772f', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + 'cf969d58-44a5-4493-89c1-437e637532eb', + 'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '2023-07-06 15:21:11.14' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/H2_EMBEDDED.sql new file mode 100644 index 00000000000..f596d5ddb82 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/H2_EMBEDDED.sql @@ -0,0 +1,45 @@ +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + FHIR_ID, + 
SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1670, + 'R4', + 0, + '2023-06-15 09:58:42.92', + '2023-06-15 09:58:42.92', + '1670', + 0, + '6beed652b77f6c65d776e57341a0b5b0596ac9cfb0e8345a5a5cfbfaa59e2b62', + 1, + 0, + 0, + 0, + 0, + 1, + 0, + 0, + 1, + 0, + 1, + 'Observation', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..16b21884ab9 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/MSSQL_2012.sql @@ -0,0 +1,45 @@ +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + FHIR_ID, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1670, + 'R4', + 'false', + '2023-06-15 09:58:42.92', + '2023-06-15 09:58:42.92', + '1670', + 'false', + '6beed652b77f6c65d776e57341a0b5b0596ac9cfb0e8345a5a5cfbfaa59e2b62', + 1, + 'false', + 'false', + 'false', + 'false', + 'true', + 'false', + 'false', + 'true', + 'false', + 'true', + 'Observation', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/ORACLE_12C.sql new file mode 100644 index 00000000000..9e3cb095ad4 --- /dev/null +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/ORACLE_12C.sql @@ -0,0 +1,45 @@ +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + FHIR_ID, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1670, + 'R4', + 0, + SYSDATE, + SYSDATE, + '1670', + 0, + '6beed652b77f6c65d776e57341a0b5b0596ac9cfb0e8345a5a5cfbfaa59e2b62', + 1, + 0, + 0, + 0, + 0, + 1, + 0, + 0, + 1, + 0, + 1, + 'Observation', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..52fed544922 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_3_0/data/POSTGRES_9_4.sql @@ -0,0 +1,45 @@ +INSERT INTO HFJ_RESOURCE ( + RES_ID, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + FHIR_ID, + SP_HAS_LINKS, + HASH_SHA256, + SP_INDEX_STATUS, + SP_CMPSTR_UNIQ_PRESENT, + SP_COORDS_PRESENT, + SP_DATE_PRESENT, + SP_NUMBER_PRESENT, + SP_QUANTITY_NRML_PRESENT, + SP_QUANTITY_PRESENT, + SP_STRING_PRESENT, + SP_TOKEN_PRESENT, + SP_CMPTOKS_PRESENT, + SP_URI_PRESENT, + RES_TYPE, + RES_VER +) VALUES ( + 1670, + 'R4', + FALSE, + '2023-06-15 09:58:42.92', + '2023-06-15 09:58:42.92', + '1670', + FALSE, + '6beed652b77f6c65d776e57341a0b5b0596ac9cfb0e8345a5a5cfbfaa59e2b62', + 1, + FALSE, + FALSE, + FALSE, + FALSE, + TRUE, + FALSE, + FALSE, + TRUE, + FALSE, + TRUE, + 'Observation', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/H2_EMBEDDED.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/H2_EMBEDDED.sql index c631ef56c5e..18b1fe9bdbf 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/H2_EMBEDDED.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/H2_EMBEDDED.sql @@ -16,6 +16,7 @@ INSERT INTO HFJ_RESOURCE ( SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, + SEARCH_URL_PRESENT, RES_TYPE, RES_VER ) VALUES ( @@ -36,6 +37,7 @@ INSERT INTO HFJ_RESOURCE ( 0, 1, 1, + 0, 'Observation', 1 ); @@ -52,9 +54,11 @@ INSERT INTO HFJ_RES_SEARCH_URL ( ); INSERT INTO HFJ_REVINFO ( - REV + REV, + REVTSTMP ) VALUES ( - 1 + 1, + '2024-03-29 10:14:40.69' ); INSERT INTO MPI_LINK_AUD ( @@ -71,7 +75,12 @@ INSERT INTO MPI_LINK_AUD ( VERSION, EID_MATCH, NEW_PERSON, - SCORE + SCORE, + CREATED, + UPDATED, + VECTOR, + PARTITION_ID, + PARTITION_DATE ) VALUES ( 1, 1, @@ -86,7 +95,42 @@ INSERT INTO MPI_LINK_AUD ( 1, 0, 1, - 1 + 1, + '2024-03-29 10:14:40.69', + '2024-03-29 10:14:41.70', + 3, + 1, + '2024-04-05' ); +INSERT INTO HFJ_TAG_DEF ( + TAG_ID, + TAG_CODE, + TAG_DISPLAY, + TAG_SYSTEM, + TAG_TYPE, + TAG_USER_SELECTED, + TAG_VERSION +) VALUES ( + 3, + 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', + 'display', + 'https://github.com/hapifhir/hapi-fhir/ns/jpa/profile', + 1, + 0, + 'V 1.0' +); +INSERT INTO HFJ_RESOURCE_MODIFIED ( + RES_ID, + RES_VER, + CREATED_TIME, + RESOURCE_TYPE, + SUMMARY_MESSAGE +) VALUES ( + '1', + '1', + '2024-03-30 10:14:41.70', + 'Observation', + '{"operationType":"CREATE","attributes":{"attKey":"attValue"},"transactionId":"txId","mediaType":"json","messageKey":"messageKey","payloadId":"Observation/1","partitionId":{"allPartitions":true},"payloadVersion":"1","subscriptionId":"subId"}' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/MSSQL_2012.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/MSSQL_2012.sql index c631ef56c5e..18b1fe9bdbf 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/MSSQL_2012.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/MSSQL_2012.sql @@ -16,6 +16,7 @@ INSERT INTO HFJ_RESOURCE ( SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, + SEARCH_URL_PRESENT, RES_TYPE, RES_VER ) VALUES ( @@ -36,6 +37,7 @@ INSERT INTO HFJ_RESOURCE ( 0, 1, 1, + 0, 'Observation', 1 ); @@ -52,9 +54,11 @@ INSERT INTO HFJ_RES_SEARCH_URL ( ); INSERT INTO HFJ_REVINFO ( - REV + REV, + REVTSTMP ) VALUES ( - 1 + 1, + '2024-03-29 10:14:40.69' ); INSERT INTO MPI_LINK_AUD ( @@ -71,7 +75,12 @@ INSERT INTO MPI_LINK_AUD ( VERSION, EID_MATCH, NEW_PERSON, - SCORE + SCORE, + CREATED, + UPDATED, + VECTOR, + PARTITION_ID, + PARTITION_DATE ) VALUES ( 1, 1, @@ -86,7 +95,42 @@ INSERT INTO MPI_LINK_AUD ( 1, 0, 1, - 1 + 1, + '2024-03-29 10:14:40.69', + '2024-03-29 10:14:41.70', + 3, + 1, + '2024-04-05' ); +INSERT INTO HFJ_TAG_DEF ( + TAG_ID, + TAG_CODE, + TAG_DISPLAY, + TAG_SYSTEM, + TAG_TYPE, + TAG_USER_SELECTED, + TAG_VERSION +) VALUES ( + 3, + 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', + 'display', + 'https://github.com/hapifhir/hapi-fhir/ns/jpa/profile', + 1, + 0, + 'V 1.0' +); +INSERT INTO HFJ_RESOURCE_MODIFIED ( + RES_ID, + RES_VER, + CREATED_TIME, + RESOURCE_TYPE, + SUMMARY_MESSAGE +) VALUES ( + '1', + '1', + '2024-03-30 10:14:41.70', + 'Observation', + '{"operationType":"CREATE","attributes":{"attKey":"attValue"},"transactionId":"txId","mediaType":"json","messageKey":"messageKey","payloadId":"Observation/1","partitionId":{"allPartitions":true},"payloadVersion":"1","subscriptionId":"subId"}' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/ORACLE_12C.sql 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/ORACLE_12C.sql index 105575b8853..44433dde081 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/ORACLE_12C.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/ORACLE_12C.sql @@ -16,6 +16,7 @@ INSERT INTO HFJ_RESOURCE ( SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, + SEARCH_URL_PRESENT, RES_TYPE, RES_VER ) VALUES ( @@ -36,6 +37,7 @@ INSERT INTO HFJ_RESOURCE ( 0, 1, 1, + 0, 'Observation', 1 ); @@ -52,9 +54,11 @@ INSERT INTO HFJ_RES_SEARCH_URL ( ); INSERT INTO HFJ_REVINFO ( - REV + REV, + REVTSTMP ) VALUES ( - 1 + 1, + SYSDATE ); INSERT INTO MPI_LINK_AUD ( @@ -71,7 +75,12 @@ INSERT INTO MPI_LINK_AUD ( VERSION, EID_MATCH, NEW_PERSON, - SCORE + SCORE, + CREATED, + UPDATED, + VECTOR, + PARTITION_ID, + PARTITION_DATE ) VALUES ( 1, 1, @@ -86,7 +95,42 @@ INSERT INTO MPI_LINK_AUD ( 1, 0, 1, - 1 + 1, + SYSDATE, + SYSDATE, + 3, + 1, + SYSDATE ); +INSERT INTO HFJ_TAG_DEF ( + TAG_ID, + TAG_CODE, + TAG_DISPLAY, + TAG_SYSTEM, + TAG_TYPE, + TAG_USER_SELECTED, + TAG_VERSION +) VALUES ( + 3, + 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', + 'display', + 'https://github.com/hapifhir/hapi-fhir/ns/jpa/profile', + 1, + 0, + 'V 1.0' +); +INSERT INTO HFJ_RESOURCE_MODIFIED ( + RES_ID, + RES_VER, + CREATED_TIME, + RESOURCE_TYPE, + SUMMARY_MESSAGE +) VALUES ( + '1', + '1', + SYSDATE, + 'Observation', + '{"operationType":"CREATE","attributes":{"attKey":"attValue"},"transactionId":"txId","mediaType":"json","messageKey":"messageKey","payloadId":"Observation/1","partitionId":{"allPartitions":true},"payloadVersion":"1","subscriptionId":"subId"}' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/POSTGRES_9_4.sql index d83deca74f9..337fd09228e 
100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_6_0/data/POSTGRES_9_4.sql @@ -16,6 +16,7 @@ INSERT INTO HFJ_RESOURCE ( SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, + SEARCH_URL_PRESENT, RES_TYPE, RES_VER ) VALUES ( @@ -36,6 +37,7 @@ INSERT INTO HFJ_RESOURCE ( false, true, true, + false, 'Observation', 1 ); @@ -52,9 +54,11 @@ INSERT INTO HFJ_RES_SEARCH_URL ( ); INSERT INTO HFJ_REVINFO ( - REV + REV, + REVTSTMP ) VALUES ( - 1 + 1, + '2024-03-29 10:14:40.69' ); INSERT INTO MPI_LINK_AUD ( @@ -71,7 +75,12 @@ INSERT INTO MPI_LINK_AUD ( VERSION, EID_MATCH, NEW_PERSON, - SCORE + SCORE, + CREATED, + UPDATED, + VECTOR, + PARTITION_ID, + PARTITION_DATE ) VALUES ( 1, 1, @@ -86,7 +95,42 @@ INSERT INTO MPI_LINK_AUD ( 1, false, true, - 1 + 1, + '2024-03-29 10:14:40.69', + '2024-03-29 10:14:41.70', + 3, + 1, + '2024-04-05' ); +INSERT INTO HFJ_TAG_DEF ( + TAG_ID, + TAG_CODE, + TAG_DISPLAY, + TAG_SYSTEM, + TAG_TYPE, + TAG_USER_SELECTED, + TAG_VERSION +) VALUES ( + 3, + 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', + 'display', + 'https://github.com/hapifhir/hapi-fhir/ns/jpa/profile', + 1, + false, + 'V 1.0' +); +INSERT INTO HFJ_RESOURCE_MODIFIED ( + RES_ID, + RES_VER, + CREATED_TIME, + RESOURCE_TYPE, + SUMMARY_MESSAGE +) VALUES ( + '1', + '1', + '2024-03-30 10:14:41.70', + 'Observation', + '{"operationType":"CREATE","attributes":{"attKey":"attValue"},"transactionId":"txId","mediaType":"json","messageKey":"messageKey","payloadId":"Observation/1","partitionId":{"allPartitions":true},"payloadVersion":"1","subscriptionId":"subId"}' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/H2_EMBEDDED.sql new file mode 100644 index 00000000000..cd68afd53a8 --- 
/dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/H2_EMBEDDED.sql @@ -0,0 +1,195 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC, + REQUEST_ID, + SOURCE_URI +) VALUES ( + 1678, + '2024-04-05', + 1, + '2024-04-05 15:54:05.296', + 'R4', + 0, + '2023-05-01 15:54:05.296', + '2023-05-01 15:54:05.296', + 'JSON', + 1413284, + 1678, + 'Observation', + 1, + '{"resourceType":"Patient","meta":{"profile":["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"]},"name":[{"family":"Lastname","given":["Firstname"],"prefix":["Mrs"]}],"telecom":[{"system":"phone","value":"1-800-683-1319","use":"home"}],"gender":"female","birthDate":"1960-09-30","address":[{"line":["622 College Street"],"city":"Toronto","state":"Ontario","country":"Canada"}],"generalPractitioner":[{"reference":"Practitioner/P1","display":"Dr S. 
Doctor"}],"managingOrganization":{"reference":"Organization/O1","display":"Bedrock General Hospital"}}', + 'OUNwoE5RUlX8UuEF', + 'urn:source:1' +); + +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + 72995, + 'report', + 0, + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + CHUNK_DATA_VC, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '6bcd7278-eacb-4f97-b6ed-4086b36747e5', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '{"key":"value"}', + '2023-07-06 15:21:11.14', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' 
+); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + JOB_CONTENTS_VC, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 66, + 61, + 72995, + 'content_1', + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO HFJ_SEARCH ( + PID, + CREATED, + SEARCH_DELETED, + EXPIRY_OR_NULL, + FAILURE_CODE, + FAILURE_MESSAGE, + LAST_UPDATED_HIGH, + LAST_UPDATED_LOW, + NUM_BLOCKED, + NUM_FOUND, + PREFERRED_PAGE_SIZE, + RESOURCE_ID, + RESOURCE_TYPE, + SEARCH_PARAM_MAP, + SEARCH_PARAM_MAP_BIN, + SEARCH_QUERY_STRING, + SEARCH_QUERY_STRING_HASH, + SEARCH_QUERY_STRING_VC, + SEARCH_TYPE, + SEARCH_STATUS, + TOTAL_COUNT, + SEARCH_UUID, + OPTLOCK_VERSION +) VALUES ( + 53, + '2023-05-01 16:52:09.149', + FALSE, + '2024-04-05 15:16:26.43', + 500, + 'Failure Message', + '2024-04-05 15:17:26.43', + '2024-04-05 15:18:26.43', + 0, + 1, + 10, + 1906, + 'Patient', + 5146583, + 7368816, + 8479927, + 1212873581, + '?_lastUpdated=le2021-05-31', + 1, + 'FINISHED', + 1, + '2999ba50-f77c-11ee-89b4-0800200c9a66', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..a3b483aea13 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/MSSQL_2012.sql @@ -0,0 +1,195 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC, + REQUEST_ID, + SOURCE_URI +) VALUES ( + 1678, + '2024-04-05', + 1, + '2024-04-05 15:54:05.296', + 'R4', + 'false', + '2023-05-01 15:54:05.296', + '2023-05-01 15:54:05.296', + 'JSON', + 1413284, + 1678, + 'Observation', + 1, + 
'{"resourceType":"Patient","meta":{"profile":["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"]},"name":[{"family":"Lastname","given":["Firstname"],"prefix":["Mrs"]}],"telecom":[{"system":"phone","value":"1-800-683-1319","use":"home"}],"gender":"female","birthDate":"1960-09-30","address":[{"line":["622 College Street"],"city":"Toronto","state":"Ontario","country":"Canada"}],"generalPractitioner":[{"reference":"Practitioner/P1","display":"Dr S. Doctor"}],"managingOrganization":{"reference":"Organization/O1","display":"Bedrock General Hospital"}}', + 'OUNwoE5RUlX8UuEF', + 'urn:source:1' +); + +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + 72995, + 'report', + 'false', + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' 
+); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + CHUNK_DATA_VC, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '6bcd7278-eacb-4f97-b6ed-4086b36747e5', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '{"key":"value"}', + '2023-07-06 15:21:11.14', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + JOB_CONTENTS_VC, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 66, + 61, + 72995, + 'content_1', + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO HFJ_SEARCH ( + PID, + CREATED, + SEARCH_DELETED, + EXPIRY_OR_NULL, + FAILURE_CODE, + FAILURE_MESSAGE, + LAST_UPDATED_HIGH, + LAST_UPDATED_LOW, + NUM_BLOCKED, + NUM_FOUND, + PREFERRED_PAGE_SIZE, + RESOURCE_ID, + RESOURCE_TYPE, + SEARCH_PARAM_MAP, + SEARCH_PARAM_MAP_BIN, + SEARCH_QUERY_STRING, + SEARCH_QUERY_STRING_HASH, + SEARCH_QUERY_STRING_VC, + SEARCH_TYPE, + SEARCH_STATUS, + TOTAL_COUNT, + SEARCH_UUID, + OPTLOCK_VERSION +) VALUES ( + 53, + '2023-05-01 16:52:09.149', + 0, + '2024-04-05 15:16:26.43', + 500, + 'Failure Message', + '2024-04-05 15:17:26.43', + '2024-04-05 15:18:26.43', + 0, + 1, + 10, + 1906, + 'Patient', + 5146583, + 7368816, + 8479927, + 1212873581, + '?_lastUpdated=le2021-05-31', + 1, + 'FINISHED', + 1, + '2999ba50-f77c-11ee-89b4-0800200c9a66', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/ORACLE_12C.sql new file mode 100644 index 
00000000000..4741c649397 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/ORACLE_12C.sql @@ -0,0 +1,195 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC, + REQUEST_ID, + SOURCE_URI +) VALUES ( + 1678, + SYSDATE, + 1, + SYSDATE, + 'R4', + 0, + SYSDATE, + SYSDATE, + 'JSON', + '1413284', + 1678, + 'Observation', + 1, + '{"resourceType":"Patient","meta":{"profile":["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"]},"name":[{"family":"Lastname","given":["Firstname"],"prefix":["Mrs"]}],"telecom":[{"system":"phone","value":"1-800-683-1319","use":"home"}],"gender":"female","birthDate":"1960-09-30","address":[{"line":["622 College Street"],"city":"Toronto","state":"Ontario","country":"Canada"}],"generalPractitioner":[{"reference":"Practitioner/P1","display":"Dr S. 
Doctor"}],"managingOrganization":{"reference":"Organization/O1","display":"Bedrock General Hospital"}}', + 'OUNwoE5RUlX8UuEF', + 'urn:source:1' +); + +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 0, + 0, + 0, + SYSDATE, + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + SYSDATE, + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + SYSDATE, + 'COMPLETED', + 200, + 'Error message', + HEXTORAW('8B9D5255'), + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + HEXTORAW('453d7a34'), + 'report', + 0, + SYSDATE, + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' +); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + CHUNK_DATA_VC, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '6bcd7278-eacb-4f97-b6ed-4086b36747e5', + SYSDATE, + SYSDATE, + 0, + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 'bulkImportJob', + 1, + 0, + 0, + SYSDATE, + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + HEXTORAW('453d7a34'), + '{"key":"value"}', + SYSDATE, + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' 
+); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + JOB_CONTENTS_VC, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 66, + 61, + HEXTORAW('453d7a34'), + 'content_1', + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO HFJ_SEARCH ( + PID, + CREATED, + SEARCH_DELETED, + EXPIRY_OR_NULL, + FAILURE_CODE, + FAILURE_MESSAGE, + LAST_UPDATED_HIGH, + LAST_UPDATED_LOW, + NUM_BLOCKED, + NUM_FOUND, + PREFERRED_PAGE_SIZE, + RESOURCE_ID, + RESOURCE_TYPE, + SEARCH_PARAM_MAP, + SEARCH_PARAM_MAP_BIN, + SEARCH_QUERY_STRING, + SEARCH_QUERY_STRING_HASH, + SEARCH_QUERY_STRING_VC, + SEARCH_TYPE, + SEARCH_STATUS, + TOTAL_COUNT, + SEARCH_UUID, + OPTLOCK_VERSION +) VALUES ( + 53, + SYSDATE, + 0, + SYSDATE, + 500, + 'Failure Message', + SYSDATE, + SYSDATE, + 0, + 1, + 10, + 1906, + 'Patient', + HEXTORAW('1B25E293'), + HEXTORAW('453d7a34'), + HEXTORAW('28721FB0'), + 1212873581, + '?_lastUpdated=le2021-05-31', + 1, + 'FINISHED', + 1, + '2999ba50-f77c-11ee-89b4-0800200c9a66', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..75e6ca1aa22 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V6_8_0/data/POSTGRES_9_4.sql @@ -0,0 +1,195 @@ +INSERT INTO HFJ_RES_VER ( + PID, + PARTITION_DATE, + PARTITION_ID, + RES_DELETED_AT, + RES_VERSION, + HAS_TAGS, + RES_PUBLISHED, + RES_UPDATED, + RES_ENCODING, + RES_TEXT, + RES_ID, + RES_TYPE, + RES_VER, + RES_TEXT_VC, + REQUEST_ID, + SOURCE_URI +) VALUES ( + 1678, + '2024-04-05', + 1, + '2024-04-05 15:54:05.296', + 'R4', + FALSE, + '2023-05-01 15:54:05.296', + '2023-05-01 15:54:05.296', + 'JSON', + 1413284, + 1678, + 'Observation', + 1, + 
'{"resourceType":"Patient","meta":{"profile":["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"]},"name":[{"family":"Lastname","given":["Firstname"],"prefix":["Mrs"]}],"telecom":[{"system":"phone","value":"1-800-683-1319","use":"home"}],"gender":"female","birthDate":"1960-09-30","address":[{"line":["622 College Street"],"city":"Toronto","state":"Ontario","country":"Canada"}],"generalPractitioner":[{"reference":"Practitioner/P1","display":"Dr S. Doctor"}],"managingOrganization":{"reference":"Organization/O1","display":"Bedrock General Hospital"}}', + 'OUNwoE5RUlX8UuEF', + 'urn:source:1' +); + +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + false, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + true, + 72995, + 'report', + false, + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' 
+); + +INSERT INTO BT2_WORK_CHUNK ( + ID, + CREATE_TIME, + END_TIME, + ERROR_COUNT, + INSTANCE_ID, + DEFINITION_ID, + DEFINITION_VER, + RECORDS_PROCESSED, + SEQ, + START_TIME, + STAT, + TGT_STEP_ID, + ERROR_MSG, + CHUNK_DATA, + CHUNK_DATA_VC, + UPDATE_TIME, + WARNING_MSG +) VALUES ( + '6bcd7278-eacb-4f97-b6ed-4086b36747e5', + '2023-07-06 15:20:20.797', + '2023-07-06 15:21:11.142', + 0, + '2eb2c28c-fa20-48df-9652-00669dcaea2c', + 'bulkImportJob', + 1, + 0, + 0, + '2023-07-06 15:21:11.14', + 'COMPLETED', + 'ReadInResourcesFromFileStep', + 'Error message', + 72995, + '{"key":"value"}', + '2023-07-06 15:21:11.14', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.' +); + +INSERT INTO HFJ_BLK_IMPORT_JOBFILE ( + PID, + JOB_PID, + JOB_CONTENTS, + JOB_CONTENTS_VC, + FILE_DESCRIPTION, + FILE_SEQ, + TENANT_NAME +) VALUES ( + 66, + 61, + 72995, + 'content_1', + 'Rows 0 - 10', + 0, + '' +); + +INSERT INTO HFJ_SEARCH ( + PID, + CREATED, + SEARCH_DELETED, + EXPIRY_OR_NULL, + FAILURE_CODE, + FAILURE_MESSAGE, + LAST_UPDATED_HIGH, + LAST_UPDATED_LOW, + NUM_BLOCKED, + NUM_FOUND, + PREFERRED_PAGE_SIZE, + RESOURCE_ID, + RESOURCE_TYPE, + SEARCH_PARAM_MAP, + SEARCH_PARAM_MAP_BIN, + SEARCH_QUERY_STRING, + SEARCH_QUERY_STRING_HASH, + SEARCH_QUERY_STRING_VC, + SEARCH_TYPE, + SEARCH_STATUS, + TOTAL_COUNT, + SEARCH_UUID, + OPTLOCK_VERSION +) VALUES ( + 53, + '2023-05-01 16:52:09.149', + FALSE, + '2024-04-05 15:16:26.43', + 500, + 'Failure Message', + '2024-04-05 15:17:26.43', + '2024-04-05 15:18:26.43', + 0, + 1, + 10, + 1906, + 'Patient', + 5146583, + '013d7d16d7ad4fefb61bd95b765c8ceb', + 8479927, + 1212873581, + '?_lastUpdated=le2021-05-31', + 1, + 'FINISHED', + 1, + '2999ba50-f77c-11ee-89b4-0800200c9a66', + 1 +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/H2_EMBEDDED.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/H2_EMBEDDED.sql new file mode 
100644 index 00000000000..65889672dc4 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/H2_EMBEDDED.sql @@ -0,0 +1,57 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG, + USER_NAME, + CLIENT_ID +) VALUES ( + '79751e50-fbfc-479a-8e14-29bed1973d66', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"7be85199-0136-406e-93b8-cbe44f15aa6c","batchSize":100}', + 1, + 72995, + 'report', + 0, + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.', + 'triggeringUser', + 'clientId' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/MSSQL_2012.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/MSSQL_2012.sql new file mode 100644 index 00000000000..505d4bb9a20 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/MSSQL_2012.sql @@ -0,0 +1,57 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + 
WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG, + USER_NAME, + CLIENT_ID +) VALUES ( + '79751e50-fbfc-479a-8e14-29bed1973d66', + 0, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"7be85199-0136-406e-93b8-cbe44f15aa6c","batchSize":100}', + 1, + 72995, + 'report', + 'false', + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.', + 'triggeringUser', + 'clientId' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/ORACLE_12C.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/ORACLE_12C.sql new file mode 100644 index 00000000000..90749c66044 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/ORACLE_12C.sql @@ -0,0 +1,57 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG, + USER_NAME, + CLIENT_ID +) VALUES ( + '79751e50-fbfc-479a-8e14-29bed1973d66', + 0, + 0, + 0, + SYSDATE, + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + SYSDATE, + 0, + '0ms', + '{"jobId":"37698660-4a0f-4c9c-84d5-b6f231def870","batchSize":100}', + 1, + SYSDATE, + 'COMPLETED', + 200, + 'Error message', + HEXTORAW('8B9D5255'), + '{"jobId":"7be85199-0136-406e-93b8-cbe44f15aa6c","batchSize":100}', + 1, + 
HEXTORAW('453d7a34'), + 'report', + 0, + SYSDATE, + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.', + 'triggeringUser', + 'clientId' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/POSTGRES_9_4.sql new file mode 100644 index 00000000000..b07e188a04d --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_0_0/data/POSTGRES_9_4.sql @@ -0,0 +1,57 @@ +INSERT INTO BT2_JOB_INSTANCE ( + ID, + JOB_CANCELLED, + CMB_RECS_PROCESSED, + CMB_RECS_PER_SEC, + CREATE_TIME, + CUR_GATED_STEP_ID, + DEFINITION_ID, + DEFINITION_VER, + END_TIME, + ERROR_COUNT, + EST_REMAINING, + PARAMS_JSON, + PROGRESS_PCT, + START_TIME, + STAT, + TOT_ELAPSED_MILLIS, + ERROR_MSG, + PARAMS_JSON_LOB, + PARAMS_JSON_VC, + WORK_CHUNKS_PURGED, + REPORT, + REPORT_VC, + FAST_TRACKING, + UPDATE_TIME, + WARNING_MSG, + USER_NAME, + CLIENT_ID +) VALUES ( + '79751e50-fbfc-479a-8e14-29bed1973d66', + false, + 0, + 0, + '2023-08-06 14:24:10.845', + 'WriteBundleForImportStep', + 'bulkImportJob', + 1, + '2023-08-06 14:25:11.098', + 0, + '0ms', + '{"jobId":"ec0df47d-8741-41cb-ab77-ca7c00c4799d","batchSize":100}', + 1, + '2023-08-06 14:24:10.875', + 'COMPLETED', + 200, + 'Error message', + 83006, + '{"jobId":"7be85199-0136-406e-93b8-cbe44f15aa6c","batchSize":100}', + true, + 72995, + 'report', + false, + '2023-08-06 14:24:10.875', + 'Failed to reindex resource because unique search parameter unique-code could not be enforced.', + 'triggeringUser', + 'clientId' +); diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java index b5fd1631e5b..b73cde3172e 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java @@ -82,8 +82,14 @@ public class HapiSchemaMigrationTest { VersionEnum.V5_3_0, VersionEnum.V5_4_0, VersionEnum.V5_5_0, + VersionEnum.V5_7_0, VersionEnum.V6_0_0, + VersionEnum.V6_1_0, + VersionEnum.V6_2_0, + VersionEnum.V6_3_0, VersionEnum.V6_6_0, + VersionEnum.V6_8_0, + VersionEnum.V7_0_0, VersionEnum.V7_2_0 ); From edd04916fec9049c5f4e8958ad8d63d8ffd5805f Mon Sep 17 00:00:00 2001 From: Brenin Rhodes Date: Mon, 15 Apr 2024 12:03:41 -0600 Subject: [PATCH 11/26] Fix incorrect version on resource parameter (#5813) * Fix incorrect version on resource parameter * Add dstu3 test * Fix tests * Remove failing IEmailSender bean * Add tests * Update to 3.3.1 --- .../5812-fix-incorrect-resource-version.yaml | 4 + .../fhir/jpa/test/config/TestDstu3Config.java | 7 - .../PlanDefinitionApplyProvider.java | 4 +- ...ivityDefinitionOperationsProviderTest.java | 46 ++ .../fhir/cr/dstu3/BaseCrDstu3TestServer.java | 20 +- ...java => MeasureOperationProviderTest.java} | 4 +- .../PlanDefinitionOperationsProviderTest.java | 41 ++ .../uhn/fhir/cr/dstu3/TestCrDstu3Config.java | 7 +- ...viderIT.java => CareGapsProviderTest.java} | 2 +- ...java => MeasureOperationProviderTest.java} | 4 +- .../ca/uhn/fhir/cr/r4/TestCrR4Config.java | 2 - .../resources/Exm104FhirR4MeasureBundle.json | 6 +- .../test/resources/Exm105Fhir3Measure.json | 24 +- .../Exm105FhirR3MeasurePartBundle.json | 24 +- .../dstu3/Bundle-ActivityDefinitionTest.json | 78 +++ .../EXM104-FHIR3-8.1.000-bundle.json | 20 +- .../EXM124-FHIR3-7.2.000-bundle.json | 20 +- .../hello-world/hello-world-careplan.json | 55 +++ .../hello-world/hello-world-patient-data.json | 459 ++++++++++++++++++ .../hello-world-patient-view-bundle.json | 214 ++++++++ pom.xml | 2 +- 21 files changed, 934 insertions(+), 109 deletions(-) create mode 
100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5812-fix-incorrect-resource-version.yaml create mode 100644 hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/ActivityDefinitionOperationsProviderTest.java rename hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/{CrDstu3MeasureOperationProviderIT.java => MeasureOperationProviderTest.java} (96%) create mode 100644 hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/PlanDefinitionOperationsProviderTest.java rename hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/{CareGapsProviderIT.java => CareGapsProviderTest.java} (98%) rename hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/{R4MeasureOperationProviderIT.java => MeasureOperationProviderTest.java} (99%) create mode 100644 hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/Bundle-ActivityDefinitionTest.json create mode 100644 hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-careplan.json create mode 100644 hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-data.json create mode 100644 hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-view-bundle.json diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5812-fix-incorrect-resource-version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5812-fix-incorrect-resource-version.yaml new file mode 100644 index 00000000000..a2b12aa1118 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5812-fix-incorrect-resource-version.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5812 +title: "Previously, the 'planDefinition' resource parameter for the Dstu3 version of PlanDefinition/$apply was incorrectly defined as an R4 resource. This issue has been fixed." 
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestDstu3Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestDstu3Config.java index 05a7a223779..a903603d3b9 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestDstu3Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestDstu3Config.java @@ -175,13 +175,6 @@ public class TestDstu3Config { return dataSource; } - @Bean - public IEmailSender emailSender() { - final MailConfig mailConfig = new MailConfig().setSmtpHostname("localhost").setSmtpPort(3025); - final IMailSvc mailSvc = new MailSvc(mailConfig); - return new EmailSenderImpl(mailSvc); - } - @Bean public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext, JpaStorageSettings theStorageSettings) { LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext, theStorageSettings); diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionApplyProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionApplyProvider.java index e0224ba828e..972a79535b6 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionApplyProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionApplyProvider.java @@ -88,7 +88,7 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( @IdParam IdType theId, - @OperationParam(name = "planDefinition") org.hl7.fhir.r4.model.PlanDefinition thePlanDefinition, + @OperationParam(name = "planDefinition") 
PlanDefinition thePlanDefinition, @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "url") String theUrl, @OperationParam(name = "version") String theVersion, @@ -134,7 +134,7 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( - @OperationParam(name = "planDefinition") org.hl7.fhir.r4.model.PlanDefinition thePlanDefinition, + @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "url") String theUrl, @OperationParam(name = "version") String theVersion, diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/ActivityDefinitionOperationsProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/ActivityDefinitionOperationsProviderTest.java new file mode 100644 index 00000000000..71692e2fac1 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/ActivityDefinitionOperationsProviderTest.java @@ -0,0 +1,46 @@ +package ca.uhn.fhir.cr.dstu3; + + +import ca.uhn.fhir.cr.dstu3.activitydefinition.ActivityDefinitionApplyProvider; +import org.hl7.fhir.dstu3.model.BooleanType; +import org.hl7.fhir.dstu3.model.IdType; +import org.hl7.fhir.dstu3.model.ProcedureRequest; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ActivityDefinitionOperationsProviderTest extends BaseCrDstu3TestServer { + + + @Autowired + ActivityDefinitionApplyProvider myActivityDefinitionApplyProvider; + @Test + void testActivityDefinitionApply() { + loadBundle("ca/uhn/fhir/cr/dstu3/Bundle-ActivityDefinitionTest.json"); + var requestDetails = setupRequestDetails(); + var result = myActivityDefinitionApplyProvider.apply( + new IdType("activityDefinition-test"), + null, + null, + 
"patient-1", + null, + null, + null, + null, + null, + null, + null, + null, + null, + new BooleanType(true), + null, + null, + null, + null, + requestDetails); + assertTrue(result instanceof ProcedureRequest); + ProcedureRequest request = (ProcedureRequest) result; + assertTrue(request.getDoNotPerform()); + } +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/BaseCrDstu3TestServer.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/BaseCrDstu3TestServer.java index de10ff63f25..617aba9223a 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/BaseCrDstu3TestServer.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/BaseCrDstu3TestServer.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.cr.dstu3; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.cr.IResourceLoader; -import ca.uhn.fhir.cr.dstu3.TestCrDstu3Config; +import ca.uhn.fhir.cr.config.dstu3.ApplyOperationConfig; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; @@ -18,22 +18,20 @@ import ca.uhn.fhir.test.utilities.JettyUtil; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.eclipse.jetty.server.Server; import org.eclipse.jetty.ee10.servlet.ServletContextHandler; import org.eclipse.jetty.ee10.servlet.ServletHolder; +import org.eclipse.jetty.server.Server; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.Resource; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.test.context.ContextConfiguration; import java.util.List; import 
java.util.concurrent.TimeUnit; -@ContextConfiguration(classes = {TestCrDstu3Config.class}) +@ContextConfiguration(classes = {TestCrDstu3Config.class, ApplyOperationConfig.class}) public abstract class BaseCrDstu3TestServer extends BaseJpaDstu3Test implements IResourceLoader { public static IGenericClient ourClient; @@ -46,22 +44,12 @@ public abstract class BaseCrDstu3TestServer extends BaseJpaDstu3Test implements @Autowired protected DaoRegistry myDaoRegistry; - @Autowired - ApplicationContext myApplicationContext; private SimpleRequestHeaderInterceptor mySimpleHeaderInterceptor; - - @SuppressWarnings("deprecation") - @AfterEach - public void after() { - ourClient.unregisterInterceptor(mySimpleHeaderInterceptor); - myStorageSettings.setIndexMissingFields(new JpaStorageSettings().getIndexMissingFields()); - } @Autowired RestfulServer ourRestfulServer; @BeforeEach public void beforeStartServer() throws Exception { - ourServer = new Server(0); ServletContextHandler proxyHandler = new ServletContextHandler(); @@ -97,7 +85,6 @@ public abstract class BaseCrDstu3TestServer extends BaseJpaDstu3Test implements mySimpleHeaderInterceptor = new SimpleRequestHeaderInterceptor(); ourClient.registerInterceptor(mySimpleHeaderInterceptor); myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED); - } @Override @@ -114,7 +101,6 @@ public abstract class BaseCrDstu3TestServer extends BaseJpaDstu3Test implements return loadBundle(Bundle.class, theLocation); } - public Bundle makeBundle(List theResources) { return makeBundle(theResources.toArray(new Resource[theResources.size()])); } diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/CrDstu3MeasureOperationProviderIT.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/MeasureOperationProviderTest.java similarity index 96% rename from hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/CrDstu3MeasureOperationProviderIT.java rename to 
hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/MeasureOperationProviderTest.java index 6232949ded8..45d9161dfca 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/CrDstu3MeasureOperationProviderIT.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/MeasureOperationProviderTest.java @@ -28,8 +28,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; @ContextConfiguration(classes = {TestCrDstu3Config.class}) -public class CrDstu3MeasureOperationProviderIT extends BaseCrDstu3TestServer { - private static final Logger ourLog = LoggerFactory.getLogger(CrDstu3MeasureOperationProviderIT.class); +public class MeasureOperationProviderTest extends BaseCrDstu3TestServer { + private static final Logger ourLog = LoggerFactory.getLogger(MeasureOperationProviderTest.class); @Autowired MeasureOperationsProvider myMeasureOperationsProvider; diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/PlanDefinitionOperationsProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/PlanDefinitionOperationsProviderTest.java new file mode 100644 index 00000000000..c8b493b57ad --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/PlanDefinitionOperationsProviderTest.java @@ -0,0 +1,41 @@ +package ca.uhn.fhir.cr.dstu3; + +import ca.uhn.fhir.cr.dstu3.plandefinition.PlanDefinitionApplyProvider; +import org.hl7.fhir.dstu3.model.BooleanType; +import org.hl7.fhir.dstu3.model.CarePlan; +import org.hl7.fhir.dstu3.model.IdType; +import org.hl7.fhir.dstu3.model.PlanDefinition; +import org.hl7.fhir.dstu3.model.RequestGroup; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class PlanDefinitionOperationsProviderTest extends BaseCrDstu3TestServer { + @Autowired + 
PlanDefinitionApplyProvider myPlanDefinitionApplyProvider; + + @Test + void testApply() { + loadBundle("ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-view-bundle.json"); + loadBundle("ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-data.json"); + var planDefinition = (PlanDefinition) read(new IdType("PlanDefinition", "hello-world-patient-view")); + + var requestDetails = setupRequestDetails(); + var patientId = "Patient/helloworld-patient-1"; + var result = (CarePlan) myPlanDefinitionApplyProvider.apply(null, planDefinition, null, null, null, patientId, + null, null, null, null, null, + null, null, null, null, new BooleanType(true), null, + null, null, null, + requestDetails); + + assertNotNull(result); + assertEquals(patientId, result.getSubject().getReference()); + assertEquals(1, result.getContained().size()); + var requestGroup = (RequestGroup) result.getContained().get(0); + assertEquals(1, requestGroup.getAction().size()); + var action = requestGroup.getAction().get(0); + assertEquals("Hello World!", action.getTitle()); + } +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/TestCrDstu3Config.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/TestCrDstu3Config.java index 67a8c1a6f82..c42317903b6 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/TestCrDstu3Config.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/dstu3/TestCrDstu3Config.java @@ -6,7 +6,6 @@ import ca.uhn.fhir.cr.TestCrConfig; import ca.uhn.fhir.cr.config.dstu3.CrDstu3Config; - import org.cqframework.cql.cql2elm.CqlCompilerOptions; import org.cqframework.cql.cql2elm.model.CompiledLibrary; import org.cqframework.cql.cql2elm.model.Model; @@ -30,9 +29,11 @@ import java.util.List; import java.util.Map; import java.util.Set; - @Configuration -@Import({TestCrConfig.class, CrDstu3Config.class}) +@Import({ + TestCrConfig.class, + CrDstu3Config.class +}) public class TestCrDstu3Config { @Bean diff --git 
a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderIT.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderTest.java similarity index 98% rename from hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderIT.java rename to hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderTest.java index 295e05758e1..b6911e35ae8 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderIT.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CareGapsProviderTest.java @@ -38,7 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; * 6. Provider submits additional Patient data * 7. Provider invokes care-gaps (and discovers issues are closed). */ -class CareGapsProviderIT extends BaseCrR4TestServer +class CareGapsProviderTest extends BaseCrR4TestServer { diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/R4MeasureOperationProviderIT.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java similarity index 99% rename from hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/R4MeasureOperationProviderIT.java rename to hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java index 6cca834e17f..1b4e9aa7a71 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/R4MeasureOperationProviderIT.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java @@ -20,7 +20,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(SpringExtension.class) -class R4MeasureOperationProviderIT extends BaseCrR4TestServer { +class MeasureOperationProviderTest extends BaseCrR4TestServer { public MeasureReport runEvaluateMeasure(String periodStart, String periodEnd, String subject, String measureId, String reportType, String practitioner){ @@ -160,6 +160,4 @@ class 
R4MeasureOperationProviderIT extends BaseCrR4TestServer { String.format("expected count for population \"%s\" did not match", populationName) ); } - - } diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java index e142b5e7c7d..09071d6ca64 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java @@ -160,6 +160,4 @@ public class TestCrR4Config { evaluationSettings.setValueSetCache(theGlobalValueSetCache); return evaluationSettings; } - - } diff --git a/hapi-fhir-storage-cr/src/test/resources/Exm104FhirR4MeasureBundle.json b/hapi-fhir-storage-cr/src/test/resources/Exm104FhirR4MeasureBundle.json index 96f35977412..cf4f743af30 100644 --- a/hapi-fhir-storage-cr/src/test/resources/Exm104FhirR4MeasureBundle.json +++ b/hapi-fhir-storage-cr/src/test/resources/Exm104FhirR4MeasureBundle.json @@ -261,6 +261,7 @@ ], "supplementalData": [ { + "id": "sde-ethnicity", "code": { "text": "sde-ethnicity" }, @@ -280,6 +281,7 @@ } }, { + "id": "sde-payer", "code": { "text": "sde-payer" }, @@ -299,6 +301,7 @@ } }, { + "id": "sde-race", "code": { "text": "sde-race" }, @@ -318,6 +321,7 @@ } }, { + "id": "sde-sex", "code": { "text": "sde-sex" }, @@ -357,7 +361,7 @@ ], "url": "http://localhost:8080/fhir/Library/EXM104", "version": "8.2.000", - "name": "library-EXM104", + "name": "EXM104", "status": "active", "experimental": true, "type": { diff --git a/hapi-fhir-storage-cr/src/test/resources/Exm105Fhir3Measure.json b/hapi-fhir-storage-cr/src/test/resources/Exm105Fhir3Measure.json index cc0cd100d6c..5cbb2deec54 100644 --- a/hapi-fhir-storage-cr/src/test/resources/Exm105Fhir3Measure.json +++ b/hapi-fhir-storage-cr/src/test/resources/Exm105Fhir3Measure.json @@ -12,10 +12,6 @@ "lastUpdated": "2019-12-04T17:52:12.092-07:00", "profile": [ 
"http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/proportion-measure-cqfm" ] }, - "text": { - "status": "generated", - "div": "
\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n
eCQM Title

Discharged on Statin Medication

eCQM Identifier (Measure Authoring Tool)105eCQM Version number8.1.000
NQF NumberNot ApplicableGUID1f503318-bb8d-4b91-af63-223ae0a2328e
Measurement PeriodJanuary 1, 20XX through December 31, 20XX
Measure StewardThe Joint Commission
Measure DeveloperThe Joint Commission
Endorsed ByNone
Description\r\n
\r\n
Ischemic stroke patients who are prescribed or continuing to take statin medication at hospital discharge
\r\n
Copyright\r\n
\r\n
LOINC(R) copyright 2004-2018 Regenstrief Institute, Inc. \r\n                This material contains SNOMED Clinical Terms(R) (SNOMED CT[R]) copyright 2004-2018 International Health Terminology Standards Development Organisation. All Rights Reserved.\r\n                \r\n                Measure specifications are in the Public Domain.
\r\n
Disclaimer\r\n
\r\n
These performance measures are not clinical guidelines and do not establish a standard of medical care, and have not been tested for all potential applications. The measures and specifications are provided without warranty.
\r\n
Measure ScoringProportion
Measure TypeProcess
Stratification\r\n
\r\n
None
\r\n
Risk Adjustment\r\n
\r\n
None
\r\n
Rate Aggregation\r\n
\r\n
None
\r\n
Rationale\r\n
\r\n
There is an extensive and consistent body of evidence supporting the use of statins for secondary prevention in patients with clinically evident atherosclerotic cardiovascular disease (ASCVD), which includes individuals with ischemic stroke due to large artery atherosclerosis, individuals with ischemic stroke due to intrinsic small vessel disease, and individuals with ischemic stroke not directly due to atherosclerosis but with clinically evident atherosclerotic disease in an uninvolved cerebral or noncerebral bed.  Both women and men with clinical ASCVD are at increased risk for recurrent ASCVD and ASCVD death.  High-intensity statin therapy should be initiated or continued as first-line therapy in women and men less than or equal to 75 years of age who have clinical ASCVD, unless contraindicated. In patients with clinical ASCVD and a contraindication to high-intensity statin therapy, moderate-intensity therapy should be considered as an alternative if it can be tolerated.  In individuals greater than 75 years of age, the potential for ASCVD risk reduction benefits, adverse effects, drug-drug interactions, and patient preferences should be considered, and statin therapy individualized based on these considerations (Stone, 2013).
\r\n
Clinical Recommendation Statement\r\n
\r\n
For patients with stroke of atherosclerotic origin, intensive lipid lowering therapy with statins should be initiated
\r\n
Improvement Notation\r\n
\r\n
Improvement noted as an increase in rate
\r\n
Reference\r\n
\r\n
Amarenco, P., Bogousslavsky, J., Callahan, A., III, et al. (2006, August 10). High-dose atorvastatin after stroke or transient ischemic attack. New England Journal of Medicine, 355(6), 549-559. 
\r\n
Reference\r\n
\r\n
Biffi, A., Devan, W. J., Anderson, C. D., et al. (2011, May). Statin treatment and functional outcome after ischemic stroke: Case-control and meta-analysis. Stroke, 42(5), 1314-1319. 
\r\n
Reference\r\n
\r\n
Centers for Disease Control and Prevention. (2009, May 1). Prevalence and most common causes of disability among adults—United States, 2005. Morbidity and Mortality Weekly Report, 58(16), 421-426. 
\r\n
Reference\r\n
\r\n
Chan, P. S., Nallamothu, B. K., Gurm, H. S., et al. (2007, May 8). Incremental benefit and cost-effectiveness of high-dose statin therapy in high-risk patients with coronary artery disease. Circulation, 115(18), 2398-2409. 
\r\n
Reference\r\n
\r\n
Culver, A. L., Ockene, I. S., Balasubramanian, R., et al. (2012, January 23). Statin use and risk of diabetes mellitus in postmenopausal women in the Women's Health Initiative. Archives of Internal Medicine, 172(2),  144-152. 
\r\n
Reference\r\n
\r\n
Feher, A., Pusch, G., Koltai, K., et al. (2011, April 14). Statin therapy in the primary and the secondary prevention of ischaemic cerebrovascular diseases. International Journal of Cardiology, 148(2), 131-138. 
\r\n
Reference\r\n
\r\n
Grundy, S. M., Cleeman, J. I., Merz, C. N., et al. (2004, July 13). Implications of recent clinical trials for the National Cholesterol Education Program Adult Treatment Panel III Guidelines. Circulation, 110(2), 227-239. 
\r\n
Reference\r\n
\r\n
Kernan, W. N., Ovbiagele, B., Black, H. R., et al. (2014, May). Guidelines for the prevention of stroke in patients with stroke and transient ischemic attack: A guideline for health care professionals from the American Heart Association/American Stroke Association. Stroke, 45(7), 2160-2223. 
\r\n
Reference\r\n
\r\n
Kostis, W. J., Cheng, J. Q., Dobrzynski, J. M., et al. (2012, February 7). Meta-analysis of statin effects in women versus men. Journal of the American College of Cardiology, 59(6), 572-582.
\r\n
Reference\r\n
\r\n
Lazar, L. D., Pletcher, M. J., Coxson, P. G., et al. (2011, July 12). Cost-effectiveness of statin therapy for primary prevention in a low-cost statin era. Circulation, 124(2), 146-153.
\r\n
Reference\r\n
\r\n
Mitka, M. (2012, March 7). Some question use of statins to reduce cardiovascular risks in healthy women. JAMA, 307(9), 893-894. 
\r\n
Reference\r\n
\r\n
National Cholesterol Education Program Expert Panel on Detection, Evaluation, and Adults Treatment of High Blood Cholesterol in Adults. (2002, December 17). Third report of the National Cholesterol Education Program (NCEP) Expert Panel on Detection, Evaluation, and Treatment of High Blood Cholesterol in Adults (Adult Treatment Panel III)—Final report. Circulation, 106(25), 3143-3421. 
\r\n
Reference\r\n
\r\n
Powers, W. J., Rabinstein, A. A., Ackerson, T., et al. (2018, January). 2018 guidelines for the early management of patients with acute ischemic stroke: A guideline for health care professionals from the American Heart Association/American Stroke Association. Stroke, 49, e44-e47. 
\r\n
Reference\r\n
\r\n
Rodriguez-Yanez, M., Agulla, J., Rodriguez-Gonzalez, R., et al. (2008, June). Statins and stroke. Therapeutic Advances in Cardiovascular Disease, 2(3), 157-166. 
\r\n
Reference\r\n
\r\n
Roger, V. L., Go, A. S., Lloyd-Jones, D. M., et al. (2012, January 3). Heart disease and stroke statistics—2012 update: A report from the American Heart Association. Circulation, 125(1), e2-e220. 
\r\n
Reference\r\n
\r\n
Schellinger, P. D., Bryan, R. N., Caplan, L. R., et al. (2010, July 13). Evidence-based guideline: The role of diffusion and perfusion MRI for the diagnosis of acute ischemic stroke: Report of the Therapeutics and Technology Assessment Subcommittee of the American Academy of Neurology.  Neurology, 75(2), 177-185.
\r\n
Reference\r\n
\r\n
Squizzato, A., Romualdi, E., Dentali, F., et al. (2011). Statins for acute ischemic stroke. Cochrane Database of Systematic Reviews, 8, CD007551. 
\r\n
Reference\r\n
\r\n
Stone, N. J., Robinson, J., Lichtenstein, A. H., et al. (2013, November). Guideline on the treatment of blood cholesterol to reduce atherosclerotic cardiovascular disease in adults: A report of the American College of Cardiology/American Heart Association Task Force on Practice Guidelines. Circulation, 11, 1-84. 
\r\n
Reference\r\n
\r\n
Van Dis, F. J., Keilson, L. M., Rundell, C. A., et al. (1996, June). Direct measurement of serum low-density lipoprotein cholesterol in patients with acute myocardial infarction on admission to the emergency room. American Journal of Cardiology, 77(14), 1232-1234. 
\r\n
Reference\r\n
\r\n
Weiss, R., Harder, M., %26amp; Rowe, J. (2003, May). The relationship between nonfasting and fasting lipid measurements in patients with or without type 2 diabetes mellitus receiving treatment with 3-hydroxy-3-methylglutaryl-coenzyme a reductase inhibitors. Clinical Therapeutics, 25(5), 1490-1497. 
\r\n
Definition\r\n
\r\n
None
\r\n
Guidance\r\n
\r\n
The "Non-elective Inpatient Encounter" value set intends to capture all non-scheduled hospitalizations. This value set is a subset of the "Inpatient Encounter" value set, excluding concepts that specifically refer to elective hospital admissions. Non-elective Inpatient Encounters include emergency, urgent, and unplanned admissions.\r\n                \r\n                The "Medication, Discharge" datatype refers to the discharge medication list and is intended to express medications ordered for post-discharge use.
\r\n
Transmission Format\r\n
\r\n
TBD
\r\n
Initial Population\r\n
\r\n
Inpatient hospitalizations for patients age 18 and older, discharged from inpatient care (non-elective admissions) with a principal diagnosis of ischemic or hemorrhagic stroke and a length of stay less than or equal to 120 days that ends during measurement period
\r\n
Denominator\r\n
\r\n
Inpatient hospitalizations for patients with a principal diagnosis of ischemic stroke
\r\n
Denominator Exclusions\r\n
\r\n
Inpatient hospitalizations for patients admitted for elective carotid intervention. This exclusion is implicitly modeled by only including non-elective hospitalizations.\r\n                Inpatient hospitalizations for patients discharged to another hospital\r\n                Inpatient hospitalizations for patients who left against medical advice\r\n                Inpatient hospitalizations for patients who expired\r\n                Inpatient hospitalizations for patients discharged to home for hospice care\r\n                Inpatient hospitalizations for patients discharged to a health care facility for hospice care\r\n                Inpatient hospitalizations for patients with comfort measures documented
\r\n
Numerator\r\n
\r\n
Inpatient hospitalizations for patients prescribed or continuing to take statin medication at hospital discharge
\r\n
Numerator Exclusions\r\n
\r\n
Not Applicable
\r\n
Denominator Exceptions\r\n
\r\n
Inpatient hospitalizations for patients with a reason for not prescribing statin medication at discharge\r\n                Inpatient hospitalizations for patients with a maximum LDL-c result of  less than 70 mg/dL %26lt;= 30 days prior to arrival or any time during the hospital stay\r\n                Inpatient hospitalizations for patients with a statin allergy.
\r\n
Supplemental Data Elements\r\n
\r\n
For every patient evaluated by this measure also identify payer, race, ethnicity and sex
\r\n
\r\n

Table of Contents

\r\n \r\n
\r\n

Population Criteria

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Encounter with Principal Diagnosis and Age"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounters with Discharge Disposition"\r\n                union TJC."Comfort Measures during Hospitalization"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Statin at Discharge" DischargeStatin\r\n                    such that DischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n None\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Not Given at Discharge" NoDischargeStatin\r\n                    such that NoDischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Allergy" StatinAllergy\r\n                        such that StatinAllergy.prevalencePeriod starts on or before end of IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union "Encounter with Max LDL less than 70 mg per dL"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n None\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Definitions

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Not Given at Discharge" NoDischargeStatin\r\n                    such that NoDischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Allergy" StatinAllergy\r\n                        such that StatinAllergy.prevalencePeriod starts on or before end of IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union "Encounter with Max LDL less than 70 mg per dL"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounters with Discharge Disposition"\r\n                union TJC."Comfort Measures during Hospitalization"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                where Max(["Laboratory Test, Performed": "LDL-c"] Ldl\r\n                    where Ldl.resultDatetime during Interval[Global."ToDate"(start of IschemicStrokeEncounter.relevantPeriod - 30 days), end of IschemicStrokeEncounter.relevantPeriod]\r\n                    return Ldl.result as Quantity\r\n                )%26lt; 70 'mg/dL'
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Encounter with Principal Diagnosis and Age"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Statin at Discharge" DischargeStatin\r\n                    such that DischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Ethnicity": "Ethnicity"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Payer": "Payer"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Race": "Race"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Sex": "ONC Administrative Sex"] 
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Allergy/Intolerance": "Statin Allergen"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Medication, Discharge": "Statin Grouper"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Medication, Not Discharged": "Statin Grouper"] NoStatinDischarge\r\n                where NoStatinDischarge.negationRationale in "Medical Reason"\r\n                    or NoStatinDischarge.negationRationale in "Patient Refusal"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Non Elective Inpatient Encounter" NonElectiveEncounter\r\n                where NonElectiveEncounter.principalDiagnosis in "Hemorrhagic Stroke"\r\n                    or NonElectiveEncounter.principalDiagnosis in "Ischemic Stroke"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Intervention Comfort Measures" ComfortMeasure\r\n                    such that Coalesce(start of ComfortMeasure.relevantPeriod, ComfortMeasure.authorDatetime)during Global."HospitalizationWithObservation"(IschemicStrokeEncounter)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "All Stroke Encounter" AllStrokeEncounter\r\n                with ["Patient Characteristic Birthdate": "Birth date"] BirthDate\r\n                    such that Global."CalendarAgeInYearsAt"(BirthDate.birthDatetime, start of AllStrokeEncounter.relevantPeriod)%26gt;= 18
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Intervention, Order": "Comfort Measures"]\r\n                union ["Intervention, Performed": "Comfort Measures"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Encounter with Principal Diagnosis and Age" EncounterWithAge\r\n                where EncounterWithAge.principalDiagnosis in "Ischemic Stroke"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( ( "Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    where IschemicStrokeEncounter.dischargeDisposition in "Discharge To Acute Care Facility"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Left Against Medical Advice"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Patient Expired"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Discharged to Home for Hospice Care"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Discharged to Health Care Facility for Hospice Care"\r\n                )\r\n                )
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Encounter, Performed": "Non-Elective Inpatient Encounter"] NonElectiveEncounter\r\n                where Global."LengthInDays"(NonElectiveEncounter.relevantPeriod)%26lt;= 120\r\n                    and NonElectiveEncounter.relevantPeriod ends during "Measurement Period"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Functions

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          years between ToDate(BirthDateTime)and ToDate(AsOf)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          Encounter Visit\r\n                let ObsVisit: Last(["Encounter, Performed": "Observation Services"] LastObs\r\n                    where LastObs.relevantPeriod ends 1 hour or less on or before start of Visit.relevantPeriod\r\n                    sort by \r\n                    end of relevantPeriod\r\n                ),\r\n                VisitStart: Coalesce(start of ObsVisit.relevantPeriod, start of Visit.relevantPeriod),\r\n                EDVisit: Last(["Encounter, Performed": "Emergency Department Visit"] LastED\r\n                    where LastED.relevantPeriod ends 1 hour or less on or before VisitStart\r\n                    sort by \r\n                    end of relevantPeriod\r\n                )\r\n                return Interval[Coalesce(start of EDVisit.relevantPeriod, VisitStart), \r\n                end of Visit.relevantPeriod]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          difference in days between start of Value and end of Value
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          DateTime(year from Value, month from Value, day from Value, 0, 0, 0, 0, timezone from Value)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Terminology

\r\n
\r\n
    \r\n
  • code "Birth date" ("LOINC Code (21112-8)")
  • \r\n
  • valueset "Comfort Measures" (1.3.6.1.4.1.33895.1.3.0.45)
  • \r\n
  • valueset "Discharge To Acute Care Facility" (2.16.840.1.113883.3.117.1.7.1.87)
  • \r\n
  • valueset "Discharged to Health Care Facility for Hospice Care" (2.16.840.1.113883.3.117.1.7.1.207)
  • \r\n
  • valueset "Discharged to Home for Hospice Care" (2.16.840.1.113883.3.117.1.7.1.209)
  • \r\n
  • valueset "Emergency Department Visit" (2.16.840.1.113883.3.117.1.7.1.292)
  • \r\n
  • valueset "Ethnicity" (2.16.840.1.114222.4.11.837)
  • \r\n
  • valueset "Hemorrhagic Stroke" (2.16.840.1.113883.3.117.1.7.1.212)
  • \r\n
  • valueset "Ischemic Stroke" (2.16.840.1.113883.3.117.1.7.1.247)
  • \r\n
  • valueset "LDL-c" (2.16.840.1.113883.3.117.1.7.1.215)
  • \r\n
  • valueset "Left Against Medical Advice" (2.16.840.1.113883.3.117.1.7.1.308)
  • \r\n
  • valueset "Medical Reason" (2.16.840.1.113883.3.117.1.7.1.473)
  • \r\n
  • valueset "Non-Elective Inpatient Encounter" (2.16.840.1.113883.3.117.1.7.1.424)
  • \r\n
  • valueset "Observation Services" (2.16.840.1.113762.1.4.1111.143)
  • \r\n
  • valueset "ONC Administrative Sex" (2.16.840.1.113762.1.4.1)
  • \r\n
  • valueset "Patient Expired" (2.16.840.1.113883.3.117.1.7.1.309)
  • \r\n
  • valueset "Patient Refusal" (2.16.840.1.113883.3.117.1.7.1.93)
  • \r\n
  • valueset "Payer" (2.16.840.1.114222.4.11.3591)
  • \r\n
  • valueset "Race" (2.16.840.1.114222.4.11.836)
  • \r\n
  • valueset "Statin Allergen" (2.16.840.1.113762.1.4.1110.42)
  • \r\n
  • valueset "Statin Grouper" (2.16.840.1.113762.1.4.1110.19)
  • \r\n
\r\n
\r\n

Data Criteria (QDM Data Elements)

\r\n
    \r\n
  • "Allergy/Intolerance: Statin Allergen" using "Statin Allergen (2.16.840.1.113762.1.4.1110.42)"
  • \r\n
  • "Encounter, Performed: Emergency Department Visit" using "Emergency Department Visit (2.16.840.1.113883.3.117.1.7.1.292)"
  • \r\n
  • "Encounter, Performed: Non-Elective Inpatient Encounter" using "Non-Elective Inpatient Encounter (2.16.840.1.113883.3.117.1.7.1.424)"
  • \r\n
  • "Encounter, Performed: Observation Services" using "Observation Services (2.16.840.1.113762.1.4.1111.143)"
  • \r\n
  • "Intervention, Order: Comfort Measures" using "Comfort Measures (1.3.6.1.4.1.33895.1.3.0.45)"
  • \r\n
  • "Intervention, Performed: Comfort Measures" using "Comfort Measures (1.3.6.1.4.1.33895.1.3.0.45)"
  • \r\n
  • "Laboratory Test, Performed: LDL-c" using "LDL-c (2.16.840.1.113883.3.117.1.7.1.215)"
  • \r\n
  • "Medication, Discharge: Statin Grouper" using "Statin Grouper (2.16.840.1.113762.1.4.1110.19)"
  • \r\n
  • "Medication, Not Discharged: Statin Grouper" using "Statin Grouper (2.16.840.1.113762.1.4.1110.19)"
  • \r\n
  • "Patient Characteristic Ethnicity: Ethnicity" using "Ethnicity (2.16.840.1.114222.4.11.837)"
  • \r\n
  • "Patient Characteristic Payer: Payer" using "Payer (2.16.840.1.114222.4.11.3591)"
  • \r\n
  • "Patient Characteristic Race: Race" using "Race (2.16.840.1.114222.4.11.836)"
  • \r\n
  • "Patient Characteristic Sex: ONC Administrative Sex" using "ONC Administrative Sex (2.16.840.1.113762.1.4.1)"
  • \r\n
  • "Patient Characteristic Birthdate: Birth date" using "Birth date (LOINC Code 21112-8)"
  • \r\n
\r\n

Supplemental Data Elements

\r\n
\r\n
    \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Ethnicity": "Ethnicity"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Payer": "Payer"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Race": "Race"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Sex": "ONC Administrative Sex"] 
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Risk Adjustment Variables

\r\n
\r\n
    \r\n
  • None
  • \r\n
\r\n
\r\n
\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n
Measure Set\r\n
\r\n
eMeasure Stroke (eSTK)
\r\n
" - }, "extension": [ { "url": "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/cqfm-populationBasis", "valueCode": "Encounter" @@ -184,13 +180,9 @@ "improvementNotation": "increase", "guidance": "The \"Non-elective Inpatient Encounter\" value set intends to capture all non-scheduled hospitalizations. This value set is a subset of the \"Inpatient encounter\" value set, excluding concepts that specifically refer to elective hospital admissions. Non-elective admissions include emergency, urgent and unplanned admissions.\n\nThe \"Medication, Discharge\" datatype refers to the discharge medication list and is intended to express medications ordered for post-discharge use.", "group": [ { - "identifier": { - "value": "group-1" - }, + "id":"group-1", "population": [ { - "identifier": { - "value": "initial-population" - }, + "id": "initial-population", "code": { "coding": [ { "code": "initial-population" @@ -198,9 +190,7 @@ }, "criteria": "Initial Population" }, { - "identifier": { - "value": "numerator" - }, + "id": "numerator", "code": { "coding": [ { "code": "numerator" @@ -208,9 +198,7 @@ }, "criteria": "Numerator" }, { - "identifier": { - "value": "denominator" - }, + "id": "denominator", "code": { "coding": [ { "code": "denominator" @@ -218,9 +206,7 @@ }, "criteria": "Denominator" }, { - "identifier": { - "value": "denominator-exclusion" - }, + "id": "denominator-exclusion", "code": { "coding": [ { "code": "denominator-exclusion" diff --git a/hapi-fhir-storage-cr/src/test/resources/Exm105FhirR3MeasurePartBundle.json b/hapi-fhir-storage-cr/src/test/resources/Exm105FhirR3MeasurePartBundle.json index 78623883206..14216d59953 100644 --- a/hapi-fhir-storage-cr/src/test/resources/Exm105FhirR3MeasurePartBundle.json +++ b/hapi-fhir-storage-cr/src/test/resources/Exm105FhirR3MeasurePartBundle.json @@ -14,10 +14,6 @@ "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/proportion-measure-cqfm" ] }, - "text": { - "status": "generated", - "div": "
\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n
eCQM Title

Discharged on Statin Medication

eCQM Identifier (Measure Authoring Tool)105eCQM Version number8.1.000
NQF NumberNot ApplicableGUID1f503318-bb8d-4b91-af63-223ae0a2328e
Measurement PeriodJanuary 1, 20XX through December 31, 20XX
Measure StewardThe Joint Commission
Measure DeveloperThe Joint Commission
Endorsed ByNone
Description\r\n
\r\n
Ischemic stroke patients who are prescribed or continuing to take statin medication at hospital discharge
\r\n
Copyright\r\n
\r\n
LOINC(R) copyright 2004-2018 Regenstrief Institute, Inc. \r\n                This material contains SNOMED Clinical Terms(R) (SNOMED CT[R]) copyright 2004-2018 International Health Terminology Standards Development Organisation. All Rights Reserved.\r\n                \r\n                Measure specifications are in the Public Domain.
\r\n
Disclaimer\r\n
\r\n
These performance measures are not clinical guidelines and do not establish a standard of medical care, and have not been tested for all potential applications. The measures and specifications are provided without warranty.
\r\n
Measure ScoringProportion
Measure TypeProcess
Stratification\r\n
\r\n
None
\r\n
Risk Adjustment\r\n
\r\n
None
\r\n
Rate Aggregation\r\n
\r\n
None
\r\n
Rationale\r\n
\r\n
There is an extensive and consistent body of evidence supporting the use of statins for secondary prevention in patients with clinically evident atherosclerotic cardiovascular disease (ASCVD), which includes individuals with ischemic stroke due to large artery atherosclerosis, individuals with ischemic stroke due to intrinsic small vessel disease, and individuals with ischemic stroke not directly due to atherosclerosis but with clinically evident atherosclerotic disease in an uninvolved cerebral or noncerebral bed.  Both women and men with clinical ASCVD are at increased risk for recurrent ASCVD and ASCVD death.  High-intensity statin therapy should be initiated or continued as first-line therapy in women and men less than or equal to 75 years of age who have clinical ASCVD, unless contraindicated. In patients with clinical ASCVD and a contraindication to high-intensity statin therapy, moderate-intensity therapy should be considered as an alternative if it can be tolerated.  In individuals greater than 75 years of age, the potential for ASCVD risk reduction benefits, adverse effects, drug-drug interactions, and patient preferences should be considered, and statin therapy individualized based on these considerations (Stone, 2013).
\r\n
Clinical Recommendation Statement\r\n
\r\n
For patients with stroke of atherosclerotic origin, intensive lipid lowering therapy with statins should be initiated
\r\n
Improvement Notation\r\n
\r\n
Improvement noted as an increase in rate
\r\n
Reference\r\n
\r\n
Amarenco, P., Bogousslavsky, J., Callahan, A., III, et al. (2006, August 10). High-dose atorvastatin after stroke or transient ischemic attack. New England Journal of Medicine, 355(6), 549-559. 
\r\n
Reference\r\n
\r\n
Biffi, A., Devan, W. J., Anderson, C. D., et al. (2011, May). Statin treatment and functional outcome after ischemic stroke: Case-control and meta-analysis. Stroke, 42(5), 1314-1319. 
\r\n
Reference\r\n
\r\n
Centers for Disease Control and Prevention. (2009, May 1). Prevalence and most common causes of disability among adults—United States, 2005. Morbidity and Mortality Weekly Report, 58(16), 421-426. 
\r\n
Reference\r\n
\r\n
Chan, P. S., Nallamothu, B. K., Gurm, H. S., et al. (2007, May 8). Incremental benefit and cost-effectiveness of high-dose statin therapy in high-risk patients with coronary artery disease. Circulation, 115(18), 2398-2409. 
\r\n
Reference\r\n
\r\n
Culver, A. L., Ockene, I. S., Balasubramanian, R., et al. (2012, January 23). Statin use and risk of diabetes mellitus in postmenopausal women in the Women's Health Initiative. Archives of Internal Medicine, 172(2),  144-152. 
\r\n
Reference\r\n
\r\n
Feher, A., Pusch, G., Koltai, K., et al. (2011, April 14). Statin therapy in the primary and the secondary prevention of ischaemic cerebrovascular diseases. International Journal of Cardiology, 148(2), 131-138. 
\r\n
Reference\r\n
\r\n
Grundy, S. M., Cleeman, J. I., Merz, C. N., et al. (2004, July 13). Implications of recent clinical trials for the National Cholesterol Education Program Adult Treatment Panel III Guidelines. Circulation, 110(2), 227-239. 
\r\n
Reference\r\n
\r\n
Kernan, W. N., Ovbiagele, B., Black, H. R., et al. (2014, May). Guidelines for the prevention of stroke in patients with stroke and transient ischemic attack: A guideline for health care professionals from the American Heart Association/American Stroke Association. Stroke, 45(7), 2160-2223. 
\r\n
Reference\r\n
\r\n
Kostis, W. J., Cheng, J. Q., Dobrzynski, J. M., et al. (2012, February 7). Meta-analysis of statin effects in women versus men. Journal of the American College of Cardiology, 59(6), 572-582.
\r\n
Reference\r\n
\r\n
Lazar, L. D., Pletcher, M. J., Coxson, P. G., et al. (2011, July 12). Cost-effectiveness of statin therapy for primary prevention in a low-cost statin era. Circulation, 124(2), 146-153.
\r\n
Reference\r\n
\r\n
Mitka, M. (2012, March 7). Some question use of statins to reduce cardiovascular risks in healthy women. JAMA, 307(9), 893-894. 
\r\n
Reference\r\n
\r\n
National Cholesterol Education Program Expert Panel on Detection, Evaluation, and Adults Treatment of High Blood Cholesterol in Adults. (2002, December 17). Third report of the National Cholesterol Education Program (NCEP) Expert Panel on Detection, Evaluation, and Treatment of High Blood Cholesterol in Adults (Adult Treatment Panel III)—Final report. Circulation, 106(25), 3143-3421. 
\r\n
Reference\r\n
\r\n
Powers, W. J., Rabinstein, A. A., Ackerson, T., et al. (2018, January). 2018 guidelines for the early management of patients with acute ischemic stroke: A guideline for health care professionals from the American Heart Association/American Stroke Association. Stroke, 49, e44-e47. 
\r\n
Reference\r\n
\r\n
Rodriguez-Yanez, M., Agulla, J., Rodriguez-Gonzalez, R., et al. (2008, June). Statins and stroke. Therapeutic Advances in Cardiovascular Disease, 2(3), 157-166. 
\r\n
Reference\r\n
\r\n
Roger, V. L., Go, A. S., Lloyd-Jones, D. M., et al. (2012, January 3). Heart disease and stroke statistics—2012 update: A report from the American Heart Association. Circulation, 125(1), e2-e220. 
\r\n
Reference\r\n
\r\n
Schellinger, P. D., Bryan, R. N., Caplan, L. R., et al. (2010, July 13). Evidence-based guideline: The role of diffusion and perfusion MRI for the diagnosis of acute ischemic stroke: Report of the Therapeutics and Technology Assessment Subcommittee of the American Academy of Neurology.  Neurology, 75(2), 177-185.
\r\n
Reference\r\n
\r\n
Squizzato, A., Romualdi, E., Dentali, F., et al. (2011). Statins for acute ischemic stroke. Cochrane Database of Systematic Reviews, 8, CD007551. 
\r\n
Reference\r\n
\r\n
Stone, N. J., Robinson, J., Lichtenstein, A. H., et al. (2013, November). Guideline on the treatment of blood cholesterol to reduce atherosclerotic cardiovascular disease in adults: A report of the American College of Cardiology/American Heart Association Task Force on Practice Guidelines. Circulation, 11, 1-84. 
\r\n
Reference\r\n
\r\n
Van Dis, F. J., Keilson, L. M., Rundell, C. A., et al. (1996, June). Direct measurement of serum low-density lipoprotein cholesterol in patients with acute myocardial infarction on admission to the emergency room. American Journal of Cardiology, 77(14), 1232-1234. 
\r\n
Reference\r\n
\r\n
Weiss, R., Harder, M., %26amp; Rowe, J. (2003, May). The relationship between nonfasting and fasting lipid measurements in patients with or without type 2 diabetes mellitus receiving treatment with 3-hydroxy-3-methylglutaryl-coenzyme a reductase inhibitors. Clinical Therapeutics, 25(5), 1490-1497. 
\r\n
Definition\r\n
\r\n
None
\r\n
Guidance\r\n
\r\n
The "Non-elective Inpatient Encounter" value set intends to capture all non-scheduled hospitalizations. This value set is a subset of the "Inpatient Encounter" value set, excluding concepts that specifically refer to elective hospital admissions. Non-elective Inpatient Encounters include emergency, urgent, and unplanned admissions.\r\n                \r\n                The "Medication, Discharge" datatype refers to the discharge medication list and is intended to express medications ordered for post-discharge use.
\r\n
Transmission Format\r\n
\r\n
TBD
\r\n
Initial Population\r\n
\r\n
Inpatient hospitalizations for patients age 18 and older, discharged from inpatient care (non-elective admissions) with a principal diagnosis of ischemic or hemorrhagic stroke and a length of stay less than or equal to 120 days that ends during measurement period
\r\n
Denominator\r\n
\r\n
Inpatient hospitalizations for patients with a principal diagnosis of ischemic stroke
\r\n
Denominator Exclusions\r\n
\r\n
Inpatient hospitalizations for patients admitted for elective carotid intervention. This exclusion is implicitly modeled by only including non-elective hospitalizations.\r\n                Inpatient hospitalizations for patients discharged to another hospital\r\n                Inpatient hospitalizations for patients who left against medical advice\r\n                Inpatient hospitalizations for patients who expired\r\n                Inpatient hospitalizations for patients discharged to home for hospice care\r\n                Inpatient hospitalizations for patients discharged to a health care facility for hospice care\r\n                Inpatient hospitalizations for patients with comfort measures documented
\r\n
Numerator\r\n
\r\n
Inpatient hospitalizations for patients prescribed or continuing to take statin medication at hospital discharge
\r\n
Numerator Exclusions\r\n
\r\n
Not Applicable
\r\n
Denominator Exceptions\r\n
\r\n
Inpatient hospitalizations for patients with a reason for not prescribing statin medication at discharge\r\n                Inpatient hospitalizations for patients with a maximum LDL-c result of  less than 70 mg/dL %26lt;= 30 days prior to arrival or any time during the hospital stay\r\n                Inpatient hospitalizations for patients with a statin allergy.
\r\n
Supplemental Data Elements\r\n
\r\n
For every patient evaluated by this measure also identify payer, race, ethnicity and sex
\r\n
\r\n

Table of Contents

\r\n \r\n
\r\n

Population Criteria

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Encounter with Principal Diagnosis and Age"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounters with Discharge Disposition"\r\n                union TJC."Comfort Measures during Hospitalization"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Statin at Discharge" DischargeStatin\r\n                    such that DischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n None\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Not Given at Discharge" NoDischargeStatin\r\n                    such that NoDischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Allergy" StatinAllergy\r\n                        such that StatinAllergy.prevalencePeriod starts on or before end of IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union "Encounter with Max LDL less than 70 mg per dL"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
        \r\n
      • \r\n
        \r\n None\r\n
      • \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Definitions

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Not Given at Discharge" NoDischargeStatin\r\n                    such that NoDischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union ( TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    with "Statin Allergy" StatinAllergy\r\n                        such that StatinAllergy.prevalencePeriod starts on or before end of IschemicStrokeEncounter.relevantPeriod\r\n                )\r\n                union "Encounter with Max LDL less than 70 mg per dL"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounters with Discharge Disposition"\r\n                union TJC."Comfort Measures during Hospitalization"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                where Max(["Laboratory Test, Performed": "LDL-c"] Ldl\r\n                    where Ldl.resultDatetime during Interval[Global."ToDate"(start of IschemicStrokeEncounter.relevantPeriod - 30 days), end of IschemicStrokeEncounter.relevantPeriod]\r\n                    return Ldl.result as Quantity\r\n                )%26lt; 70 'mg/dL'
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Encounter with Principal Diagnosis and Age"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          TJC."Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Statin at Discharge" DischargeStatin\r\n                    such that DischargeStatin.authorDatetime during IschemicStrokeEncounter.relevantPeriod
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Ethnicity": "Ethnicity"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Payer": "Payer"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Race": "Race"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Patient Characteristic Sex": "ONC Administrative Sex"] 
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Allergy/Intolerance": "Statin Allergen"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Medication, Discharge": "Statin Grouper"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Medication, Not Discharged": "Statin Grouper"] NoStatinDischarge\r\n                where NoStatinDischarge.negationRationale in "Medical Reason"\r\n                    or NoStatinDischarge.negationRationale in "Patient Refusal"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Non Elective Inpatient Encounter" NonElectiveEncounter\r\n                where NonElectiveEncounter.principalDiagnosis in "Hemorrhagic Stroke"\r\n                    or NonElectiveEncounter.principalDiagnosis in "Ischemic Stroke"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                with "Intervention Comfort Measures" ComfortMeasure\r\n                    such that Coalesce(start of ComfortMeasure.relevantPeriod, ComfortMeasure.authorDatetime)during Global."HospitalizationWithObservation"(IschemicStrokeEncounter)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "All Stroke Encounter" AllStrokeEncounter\r\n                with ["Patient Characteristic Birthdate": "Birth date"] BirthDate\r\n                    such that Global."CalendarAgeInYearsAt"(BirthDate.birthDatetime, start of AllStrokeEncounter.relevantPeriod)%26gt;= 18
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Intervention, Order": "Comfort Measures"]\r\n                union ["Intervention, Performed": "Comfort Measures"]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          "Encounter with Principal Diagnosis and Age" EncounterWithAge\r\n                where EncounterWithAge.principalDiagnosis in "Ischemic Stroke"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ( ( "Ischemic Stroke Encounter" IschemicStrokeEncounter\r\n                    where IschemicStrokeEncounter.dischargeDisposition in "Discharge To Acute Care Facility"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Left Against Medical Advice"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Patient Expired"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Discharged to Home for Hospice Care"\r\n                        or IschemicStrokeEncounter.dischargeDisposition in "Discharged to Health Care Facility for Hospice Care"\r\n                )\r\n                )
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          ["Encounter, Performed": "Non-Elective Inpatient Encounter"] NonElectiveEncounter\r\n                where Global."LengthInDays"(NonElectiveEncounter.relevantPeriod)%26lt;= 120\r\n                    and NonElectiveEncounter.relevantPeriod ends during "Measurement Period"
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Functions

\r\n
\r\n
    \r\n
  • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          years between ToDate(BirthDateTime)and ToDate(AsOf)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          Encounter Visit\r\n                let ObsVisit: Last(["Encounter, Performed": "Observation Services"] LastObs\r\n                    where LastObs.relevantPeriod ends 1 hour or less on or before start of Visit.relevantPeriod\r\n                    sort by \r\n                    end of relevantPeriod\r\n                ),\r\n                VisitStart: Coalesce(start of ObsVisit.relevantPeriod, start of Visit.relevantPeriod),\r\n                EDVisit: Last(["Encounter, Performed": "Emergency Department Visit"] LastED\r\n                    where LastED.relevantPeriod ends 1 hour or less on or before VisitStart\r\n                    sort by \r\n                    end of relevantPeriod\r\n                )\r\n                return Interval[Coalesce(start of EDVisit.relevantPeriod, VisitStart), \r\n                end of Visit.relevantPeriod]
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          difference in days between start of Value and end of Value
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
      \r\n
    • \r\n
      \r\n \r\n \r\n
        \r\n
      • \r\n
        \r\n
          \r\n
        • \r\n
          \r\n
          DateTime(year from Value, month from Value, day from Value, 0, 0, 0, 0, timezone from Value)
          \r\n
        • \r\n
        %26nbsp;\r\n
      • \r\n
      \r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Terminology

\r\n
\r\n
    \r\n
  • code "Birth date" ("LOINC Code (21112-8)")
  • \r\n
  • valueset "Comfort Measures" (1.3.6.1.4.1.33895.1.3.0.45)
  • \r\n
  • valueset "Discharge To Acute Care Facility" (2.16.840.1.113883.3.117.1.7.1.87)
  • \r\n
  • valueset "Discharged to Health Care Facility for Hospice Care" (2.16.840.1.113883.3.117.1.7.1.207)
  • \r\n
  • valueset "Discharged to Home for Hospice Care" (2.16.840.1.113883.3.117.1.7.1.209)
  • \r\n
  • valueset "Emergency Department Visit" (2.16.840.1.113883.3.117.1.7.1.292)
  • \r\n
  • valueset "Ethnicity" (2.16.840.1.114222.4.11.837)
  • \r\n
  • valueset "Hemorrhagic Stroke" (2.16.840.1.113883.3.117.1.7.1.212)
  • \r\n
  • valueset "Ischemic Stroke" (2.16.840.1.113883.3.117.1.7.1.247)
  • \r\n
  • valueset "LDL-c" (2.16.840.1.113883.3.117.1.7.1.215)
  • \r\n
  • valueset "Left Against Medical Advice" (2.16.840.1.113883.3.117.1.7.1.308)
  • \r\n
  • valueset "Medical Reason" (2.16.840.1.113883.3.117.1.7.1.473)
  • \r\n
  • valueset "Non-Elective Inpatient Encounter" (2.16.840.1.113883.3.117.1.7.1.424)
  • \r\n
  • valueset "Observation Services" (2.16.840.1.113762.1.4.1111.143)
  • \r\n
  • valueset "ONC Administrative Sex" (2.16.840.1.113762.1.4.1)
  • \r\n
  • valueset "Patient Expired" (2.16.840.1.113883.3.117.1.7.1.309)
  • \r\n
  • valueset "Patient Refusal" (2.16.840.1.113883.3.117.1.7.1.93)
  • \r\n
  • valueset "Payer" (2.16.840.1.114222.4.11.3591)
  • \r\n
  • valueset "Race" (2.16.840.1.114222.4.11.836)
  • \r\n
  • valueset "Statin Allergen" (2.16.840.1.113762.1.4.1110.42)
  • \r\n
  • valueset "Statin Grouper" (2.16.840.1.113762.1.4.1110.19)
  • \r\n
\r\n
\r\n

Data Criteria (QDM Data Elements)

\r\n
    \r\n
  • "Allergy/Intolerance: Statin Allergen" using "Statin Allergen (2.16.840.1.113762.1.4.1110.42)"
  • \r\n
  • "Encounter, Performed: Emergency Department Visit" using "Emergency Department Visit (2.16.840.1.113883.3.117.1.7.1.292)"
  • \r\n
  • "Encounter, Performed: Non-Elective Inpatient Encounter" using "Non-Elective Inpatient Encounter (2.16.840.1.113883.3.117.1.7.1.424)"
  • \r\n
  • "Encounter, Performed: Observation Services" using "Observation Services (2.16.840.1.113762.1.4.1111.143)"
  • \r\n
  • "Intervention, Order: Comfort Measures" using "Comfort Measures (1.3.6.1.4.1.33895.1.3.0.45)"
  • \r\n
  • "Intervention, Performed: Comfort Measures" using "Comfort Measures (1.3.6.1.4.1.33895.1.3.0.45)"
  • \r\n
  • "Laboratory Test, Performed: LDL-c" using "LDL-c (2.16.840.1.113883.3.117.1.7.1.215)"
  • \r\n
  • "Medication, Discharge: Statin Grouper" using "Statin Grouper (2.16.840.1.113762.1.4.1110.19)"
  • \r\n
  • "Medication, Not Discharged: Statin Grouper" using "Statin Grouper (2.16.840.1.113762.1.4.1110.19)"
  • \r\n
  • "Patient Characteristic Ethnicity: Ethnicity" using "Ethnicity (2.16.840.1.114222.4.11.837)"
  • \r\n
  • "Patient Characteristic Payer: Payer" using "Payer (2.16.840.1.114222.4.11.3591)"
  • \r\n
  • "Patient Characteristic Race: Race" using "Race (2.16.840.1.114222.4.11.836)"
  • \r\n
  • "Patient Characteristic Sex: ONC Administrative Sex" using "ONC Administrative Sex (2.16.840.1.113762.1.4.1)"
  • \r\n
  • "Patient Characteristic Birthdate: Birth date" using "Birth date (LOINC Code 21112-8)"
  • \r\n
\r\n

Supplemental Data Elements

\r\n
\r\n
    \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Ethnicity": "Ethnicity"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Payer": "Payer"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Race": "Race"]
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
  • \r\n
    \r\n \r\n \r\n
      \r\n
    • \r\n
      \r\n
        \r\n
      • \r\n
        \r\n
        ["Patient Characteristic Sex": "ONC Administrative Sex"] 
        \r\n
      • \r\n
      %26nbsp;\r\n
    • \r\n
    \r\n
  • \r\n
\r\n
\r\n

Risk Adjustment Variables

\r\n
\r\n
    \r\n
  • None
  • \r\n
\r\n
\r\n
\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n
Measure Set\r\n
\r\n
eMeasure Stroke (eSTK)
\r\n
" - }, "extension": [ { "url": "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/cqfm-populationBasis", @@ -243,14 +239,10 @@ "guidance": "The \"Non-elective Inpatient Encounter\" value set intends to capture all non-scheduled hospitalizations. This value set is a subset of the \"Inpatient encounter\" value set, excluding concepts that specifically refer to elective hospital admissions. Non-elective admissions include emergency, urgent and unplanned admissions.\n\nThe \"Medication, Discharge\" datatype refers to the discharge medication list and is intended to express medications ordered for post-discharge use.", "group": [ { - "identifier": { - "value": "group-1" - }, + "id": "group-1", "population": [ { - "identifier": { - "value": "initial-population" - }, + "id": "initial-population", "code": { "coding": [ { @@ -261,9 +253,7 @@ "criteria": "Initial Population" }, { - "identifier": { - "value": "numerator" - }, + "id": "numerator", "code": { "coding": [ { @@ -274,9 +264,7 @@ "criteria": "Numerator" }, { - "identifier": { - "value": "denominator" - }, + "id": "denominator", "code": { "coding": [ { @@ -287,9 +275,7 @@ "criteria": "Denominator" }, { - "identifier": { - "value": "denominator-exclusion" - }, + "id": "denominator-exclusion", "code": { "coding": [ { diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/Bundle-ActivityDefinitionTest.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/Bundle-ActivityDefinitionTest.json new file mode 100644 index 00000000000..5c3ab84d667 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/Bundle-ActivityDefinitionTest.json @@ -0,0 +1,78 @@ +{ + "resourceType": "Bundle", + "id": "bundle-test", + "type": "transaction", + "entry" : [{ + "resource": { + "resourceType": "ActivityDefinition", + "id": "activityDefinition-test", + "name": "ActivityDefinition_CreateAProcedure", + "title": "CreateProcedure", + "status": "draft", + "description": "Create the 
procedure.", + "library": [ + { + "reference": "http://test/fhir/Library/TestActivityDefinition|1.0.0" + } + ], + "kind": "ProcedureRequest", + "code": { + "coding": [ + { + "system": "http://test/fhir/System", + "code": "code" + } + ] + }, + "dynamicValue": [ + { + "path": "doNotPerform", + "language": "text/cql.name", + "expression": "ActiveProcedureStatus" + } + ] + }, + "request": { + "method": "PUT", + "url": "ActivityDefinition/activityDefinition-test" + } + }, + { + "resource": { + "resourceType": "Library", + "id": "TestActivityDefinition", + "url": "http://test/fhir/Library/TestActivityDefinition|1.0.0", + "version": "1.0.0", + "name": "TestActivityDefinition", + "parameter": [ + { + "name": "ActiveProcedureStatus", + "use": "out", + "min": 0, + "max": "1", + "type": "boolean" + } + ], + "content": [ + { + "contentType": "text/cql", + "data": "bGlicmFyeSBUZXN0QWN0aXZpdHlEZWZpbml0aW9uIHZlcnNpb24gJzEuMC4wJwoKZGVmaW5lICJBY3RpdmVQcm9jZWR1cmVTdGF0dXMiOgogIHRydWU=" + } + ] + }, + "request": { + "method": "PUT", + "url": "Library/TestActivityDefinition" + } + }, + { + "resource": { + "resourceType": "Patient", + "id": "patient-1" + }, + "request": { + "method": "PUT", + "url": "Patient/patient-1" + } + }] +} diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM104-FHIR3-8.1.000-bundle.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM104-FHIR3-8.1.000-bundle.json index e49e6e481d3..83e1937ff63 100644 --- a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM104-FHIR3-8.1.000-bundle.json +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM104-FHIR3-8.1.000-bundle.json @@ -701,10 +701,6 @@ "lastUpdated": "2019-09-13T20:33:19.546+00:00", "profile": [ "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/proportion-measure-cqfm" ] }, - "text": { - "status": "generated", - "div": "
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
Measure Title

Discharged on Antithrombotic Therapy

Measure ID

EXM104

Description

Ischemic stroke patients prescribed or continuing to take antithrombotic therapy at hospital discharge

\n
\n
\n
Measurement PeriodJanuary 1 - December 31
Copyright and Disclaimer Notice\n
\n
These performance measures are not clinical guidelines and do not establish a standard of medical care, and have not been tested for all potential applications. The measures and specifications are provided without warranty.
\n
\n
HEDIS%26reg; is a registered trademark of the National Committee for Quality Assurance (NCQA). The HEDIS measures and specifications are owned by NCQA. NCQA holds a copyright in the HEDIS measures and specifications and may rescind or alter these measures and specifications at any time. Users of the HEDIS measures and specifications shall not have the right to alter, enhance or otherwise modify the HEDIS measures and specifications, and shall not disassemble, recompile or reverse engineer the HEDIS measures and specifications. Anyone desiring to use or reproduce the materials, subject to licensed user restrictions, without modification for an internal, non-commercial purpose may do so without obtaining any approval from NCQA. Use of the Rules for Allowable Adjustments of HEDIS to make permitted adjustments of the materials does not constitute a modification. All other uses, including a commercial use, or any external reproduction, distribution and publication must be approved by NCQA and are subject to a license at the discretion of NCQA.\n\nHEDIS measures and specifications are not clinical guidelines, do not establish a standard of medical care and have not been tested for all potential applications. The measures and specifications are provided “as is” without warranty of any kind. NCQA makes no representations, warranties or endorsements about the quality of any product, test or protocol identified as numerator compliant or otherwise identified as meeting the requirements of a HEDIS measure or specification. NCQA also makes no representations, warranties or endorsements about the quality of any organization or clinician who uses or reports performance measures. 
NCQA has no liability to anyone who relies on HEDIS measures and specifications or data reflective of performance under such measures and specifications.\n\nCalculated measure results, based on unadjusted HEDIS specifications, may not be termed “Health Plan HEDIS rates” until the logic used to produce the measure results has been certified by NCQA and has been audited and designated reportable by an NCQA-Certified Auditor. Such results should be referred to as “Uncertified, Unaudited Health Plan HEDIS Rates.”\n\nCalculated measure results, based on adjusted HEDIS specifications, may not be termed “Adjusted HEDIS rates” until the logic used to produce the measure results has been certified by NCQA and has been audited and designated reportable by an NCQA-Certified Auditor. Such results should be referred to as “Adjusted, Uncertified, Unaudited HEDIS Rates.”\n\nLimited proprietary coding is contained in the measure specifications for convenience. Users of the proprietary code sets should obtain all necessary licenses from the owners of these code sets. NCQA disclaims all liability for use or accuracy of any coding contained in the specifications.\n\nThe American Medical Association holds a copyright to the CPT%26reg; codes contained in the measures specifications.\n\nThe American Hospital Association holds a copyright to the Uniform Bill Codes (UB) contained in the measure specifications. The UB Codes in the HEDIS specifications are included with the permission of the AHA. The UB Codes contained in the HEDIS specifications may be used by health plans and other health care delivery organizations for the purpose of calculating and reporting HEDIS measure results or using HEDIS measure results for their internal quality improvement purposes. All other uses of the UB Codes require a license from the AHA. 
Anyone desiring to use the UB Codes in a commercial Product(s) to generate HEDIS results, or for any other commercial use, must obtain a commercial use license directly from the AHA. To inquire about licensing, contact ub04@healthforum.com.\n\nSome measure specifications contain coding from LOINC%26reg; (http://loinc.org). The LOINC table, LOINC codes, LOINC panels and form file, LOINC linguistic variants file, LOINC/RSNA Radiology Playbook, and LOINC/IEEE Medical Device Code Mapping Table are copyright %26copy; 1995-2019, Regenstrief Institute, Inc. and the Logical Observation Identifiers Names and Codes (LOINC) Committee and is available at no cost under the license at http://loinc.org/terms-of-use.\n\n"SNOMED” and “SNOMED CT” are registered trademarks of the International Health Terminology Standards Development Organization (IHTSDO).\n\n“HL7” is the registered trademark of Health Level Seven International.\n\n%26copy; 2019 by the National Committee for Quality Assurance, all rights reserved.
\n
\n
Clinical Recommendation Statement\n

Clinical trial results suggest that antithrombotic therapy should be prescribed at discharge following acute ischemic stroke to reduce stroke mortality and morbidity as long as no contraindications exist

\n
\n
References\n
\n
Adams HP, del Zoppo G, Alberts MJ, Bhatt DL, Brass L, Furlan A, Grubb RL, Higashida RT, Jauch EC, Kidwell C, Lyden PD, Morgenstern LB, Qureshi AI, Rosenwasser RH, Scott PA, Wijdicks E. Guidelines for the Early Management of Adults with Ischemic Stroke: A Guideline From the American Heart Association/American Stroke Association Stroke Council, Clinical Cardiology Council, Cardiovascular Radiology and Intervention Council, and the Atherosclerotic Peripheral Vascular Disease and Quality of Care Outcomes in Research Interdisciplinary Working Groups. Stroke. 2007;38:1655-1711.
\n
\n
\n
\n
Adams H, Adams R, Del Zoppo G, Goldstein LB. Guidelines for the Early Management of Patients With Ischemic Stroke: Guidelines Update A Scientific Statement From the Stroke Council of the American Heart Association/American Stroke Association. Stroke Vol. 36, 2005: 916:923.
\n
\n
\n
\n
Albers GW, Amarenco P, Easton JD, Sacco RL, Teal P. Antithrombotic and Thrombolytic Therapy for Ischemic Stroke. Chest Vol. 119, 2001: 300-320.
\n
\n
\n
\n
Brott TG, Clark WM, Grotta JC, et al. Stroke the first hours. Guidelines for acute treatment. Consensus Statement. National Stroke Association. 2000.
\n
\n
\n
\n
Chen ZM, Sandercock P, Pan HC, et al. Indications for early aspirin use in acute ischemic stroke: a combined analysis of 40,000 randomized patients from the Chinese acute stroke trial and the international stroke trial. On behalf of the CAST and IST collaborative groups, Stroke 2000;31:1240-1249.
\n
\n
\n
\n
Coull BM, Williams LS, Goldstein LB, et al. Anticoagulants and Antiplatelet Agents in Acute Ischemic Stroke. Report of the Joint Stroke Guideline Development Committee of the American Academy of Neurology and the American Stroke Association (a Division of the American Heart Association) Stroke. 2002;33:1934 - 1942.
\n
\n
\n
\n
Guideline on the Use of Aspirin as Secondary Prophylaxis for Vascular Disease in Primary Care, Centre for Health Services Research University of Newcastle upon Tyne, %26amp; Centre for Health Economics of York, 1998.
\n
\n
\n
\n
Kernan, W.N., B. Ovbiagele, H. R. Black, D. M. Bravata, M. I. Chimowitz, M. D. Ezekowitz, M. C. Fang, M. Fisher, K. L. Furie, D. V. Heck, S. C. Johnston, S. E. Kasner, S. J. Kittner, P. H. Mitchell, M. W. Rich, D. Richardson, L. H. Schwamm, J. A. Wilson. "Guidelines for the Prevention of Stroke in Patients with Stroke and Transient Ischemic Attack: A Guideline for Healthcare Professionals from the American Heart Association/American Stroke Association." [in eng.]  Stroke 45, no. 7 (May 2014): 2160-223. 
\n
\n
Characteristics
ScoringProportion
TypeProcess
Improvement NotationA higher rate indicates better performance.
Guidance\n

The "Non-elective Inpatient Encounter" value set intends to capture all non-scheduled hospitalizations. This value set is a subset of the "Inpatient encounter" value set, excluding concepts that specifically refer to elective hospital admissions. Non-elective admissions include emergency, urgent and unplanned admissions.

\n

The "Medication, Discharge" datatype refers to the discharge medication list and is intended to express medications ordered for post-discharge use.

\n
\n
Definitions
\n \n \n
\n \n \n \n \n
\n

Table of Contents

\n \n
\n \n

Definitions

\n \n

Functions

\n \n

Terminology

\n
\n
    \n \n
\n
\n

Data Criteria (QDM Data Elements)

\n
\n
    \n \n
\n
\n

Supplemental Data Elements

\n \n
\n
    \n
  • None
  • \n
\n
\n

Risk Adjustment Variables

\n
\n
    \n
  • None
  • \n
\n
\n
\n \n \n \n
\n
" - }, "extension": [ { "url": "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/cqfm-populationBasis", "valueCode": "boolean" @@ -818,9 +814,7 @@ "value": "group-1" }, "population": [ { - "identifier": { - "value": "initial-population-identifier" - }, + "id": "initial-population-identifier", "code": { "coding": [ { "system": "http://terminology.hl7.org/CodeSystem/measure-population", @@ -830,9 +824,7 @@ }, "criteria": "Initial Population" }, { - "identifier": { - "value": "numerator-identifier" - }, + "id": "numerator-identifier", "code": { "coding": [ { "system": "http://terminology.hl7.org/CodeSystem/measure-population", @@ -842,9 +834,7 @@ }, "criteria": "Numerator" }, { - "identifier": { - "value": "denominator-identifier" - }, + "id": "denominator-identifier", "code": { "coding": [ { "system": "http://terminology.hl7.org/CodeSystem/measure-population", @@ -854,9 +844,7 @@ }, "criteria": "Denominator" }, { - "identifier": { - "value": "denominator-exclusions-identifier" - }, + "id": "denominator-exclusions-identifier", "code": { "coding": [ { "system": "http://terminology.hl7.org/CodeSystem/measure-population", diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM124-FHIR3-7.2.000-bundle.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM124-FHIR3-7.2.000-bundle.json index 43eb6060da1..09f3b660dff 100644 --- a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM124-FHIR3-7.2.000-bundle.json +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/connectathon/EXM124-FHIR3-7.2.000-bundle.json @@ -3340,10 +3340,6 @@ "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/proportion-measure-cqfm" ] }, - "text": { - "status": "generated", - "div": "
Id: Measure/exm124-FHIR3
Type:
system: http://hl7.org/fhir/measure-type
code: process
Identifier: system: http://hl7.org/fhir/cqi/ecqm/Measure/Identifier/cms
value: 130
system: http://hl7.org/fhir/cqi/ecqm/Measure/Identifier/nqf
value: 0034
Title: Cervical Cancer Screening
Status: active
Description: Percentage of women 21-64 years of age who were screened for cervical cancer using either of the following criteria: <br> * Women age 21-64 who had cervical cytology performed every 3 years <br> * Women age 30-64 who had cervical cytology/human papillomavirus (HPV) co-testing performed every 5 years
Purpose: Women 23-64 years of age with a visit during the measurement period
Use Context:

code: program

value:
text: eligible-provider

Topic:
system: http://loinc.org
code: 57024-2
display: Health Quality Measure Document
Contributor: author: National Committee for Quality Assurance
Related:

type: citation

citation:
Howlader, N., Noone, A.M., Krapcho, M., Miller, D., Bishop, K., Altekruse, S.F., Kosary, C.L., Yu, M., Ruhl, J., Tatalovich, Z., Mariotto, A., Lewis, D.R., Chen, H.S., Feuer, E.J., and Cronin, K.A. 2016. "SEER Cancer Statistics Review, 1975-2013." National Cancer Institute. (December 5, 2016) http://seer.cancer.gov/csr/1975_2013/

type: citation

citation:
National Business Group on Health. 2011. "Pathways to Managing Cancer in the Workplace." (May 8, 2012). http://www.tcyh.org/employers/downloads/Pathways_Managing_Cancer_2011.pdf

type: citation

citation:
Feig S. 2011. Comparison of Costs and Benefits of Breast Cancer Screening with Mammography, Ultrasonagraphy, and MRI. Obstetrics and Gynecology Clinics of North America. 38(1):179-96.

type: citation

citation:
U.S. Preventive Services Task Force (USPSTF). 2016. "Screening for Breast Cancer: U.S. Preventive Services Task Force Recommendation Statement." Ann Intern Med. 164(4):279-296. doi:10.7326/M15-2886.

Library:

reference: Library/exm124-FHIR3

Disclaimer: The performance Measure is not a clinical guideline and does not establish a standard of medical care, and has not been tested for all potential applications. THE MEASURE AND SPECIFICATIONS ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. <br> Due to technical limitations, registered trademarks are indicated by (R) or [R] and unregistered trademarks are indicated by (TM) or [TM].
Scoring:
system: http://hl7.org/fhir/measure-scoring
code: proportion
Rationale: Breast cancer is one of the most common types of cancers, accounting for 15 percent of all new cancer diagnoses in the U.S. (Howlader et al, 2016). In 2013, over 3 million women were estimated to be living with breast cancer in the U.S. and it is estimated that 12 percent of women will be diagnosed with breast cancer at some point during their lifetime (Howlader et al, 2016). <br> While there are other factors that affect a woman's risk of developing breast cancer, advancing age is a primary risk factor. Breast cancer is most frequently diagnosed among women ages 55-64; the median age at diagnosis is 62 years (Howlader et al, 2016). The chance of a woman being diagnosed with breast cancer in a given year increases with age. By age 40, the chances are 1 in 235; by age 50 it becomes 1 in 54; by age 60, it is 1 in 25 (National Business Group on Health, 2011). <br> In the U.S., costs associated with a diagnosis of breast cancer range from $451 to $2,520, factoring in continued testing, multiple office visits and procedures. The total costs related to breast cancer add up to nearly $7 billion per year in the U.S., including $2 billion spent on late-stage treatment (National Business Group on Health, 2011). If breast cancer is detected through mammography screening and diagnosed in its earliest stages, treatment may be less expensive (Feig, 2011).
Clinical Recommendation: The U.S. Preventive Services Task Force (USPSTF) recommends biennial screening mammography for women aged 50-74 years (B recommendation). <br> The decision to start screening mammography in women prior to age 50 years should be an individual one. Women who place a higher value on the potential benefit than the potential harms may choose to begin biennial screening between the ages of 40 and 49 years (C recommendation). (USPSTF, 2016) <br> The USPSTF concludes that the current evidence is insufficient to assess the balance of benefits and harms of screening mammography in women aged 75 years or older (I statement). (USPSTF, 2016) <br> The USPSTF concludes that the current evidence is insufficient to assess the benefits and harms of digital breast tomosynthesis (DBT) as a primary screening method for breast cancer (I Statement). (USPSTF, 2016) <br> The USPSTF concludes that the current evidence is insufficient to assess the balance of benefits and harms of adjunctive screening for breast cancer using breast ultrasonography, magnetic resonance imaging, DBT, or other methods in women identified to have dense breasts on an otherwise negative screening mammogram (I statement). (USPSTF, 2016)
Guidance: To ensure the measure is only looking for a cervical cytology test only after a woman turns 21 years of age, the youngest age in the initial population is 23. <br> Patient self-report for procedures as well as diagnostic studies should be recorded in 'Procedure, Performed' template or 'Diagnostic Study, Performed' template in QRDA-1. <br> Include only cytology and HPV "co-testing"; in co-testing, both cytology and HPV tests are performed (i.e., the samples are collected and both tests are ordered, regardless of the cytology result) on the same date of service. Do not include reflex testing. In addition, if the medical record indicates the HPV test was performed only after determining the cytology result, this is considered reflex testing and does not meet criteria for the measure.
Group:

Identifier:
value: group-1

Population:

Identifier:
value: initial-population-identifier

criteria: Initial Population

Identifier:
value: numerator-identifier

criteria: Numerator

Identifier:
value: denominator-identifier

criteria: Denominator

Identifier:
value: denominator-exclusions-identifier

criteria: Denominator Exclusion

Supplemental Data:

Identifier:
value: sde-ethnicity

usage:
system: http://hl7.org/fhir/measure-data-usage
code: supplemental-data

criteria: SDE Ethnicity

Identifier:
value: sde-payer

usage:
system: http://hl7.org/fhir/measure-data-usage
code: supplemental-data

criteria: SDE Payer

Identifier:
value: sde-race

usage:
system: http://hl7.org/fhir/measure-data-usage
code: supplemental-data

criteria: SDE Race

Identifier:
value: sde-sex

usage:
system: http://hl7.org/fhir/measure-data-usage
code: supplemental-data

criteria: SDE Sex

" - }, "extension": [ { "url": "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/cqfm-populationBasis", @@ -3486,9 +3482,7 @@ }, "population": [ { - "identifier": { - "value": "initial-population-identifier" - }, + "id": "initial-population-identifier", "code": { "coding": [ { @@ -3501,9 +3495,7 @@ "criteria": "Initial Population" }, { - "identifier": { - "value": "numerator-identifier" - }, + "id": "numerator-identifier", "code": { "coding": [ { @@ -3516,9 +3508,7 @@ "criteria": "Numerator" }, { - "identifier": { - "value": "denominator-identifier" - }, + "id": "denominator-identifier", "code": { "coding": [ { @@ -3531,9 +3521,7 @@ "criteria": "Denominator" }, { - "identifier": { - "value": "denominator-exclusions-identifier" - }, + "id": "denominator-exclusions-identifier", "code": { "coding": [ { diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-careplan.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-careplan.json new file mode 100644 index 00000000000..61149ee278a --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-careplan.json @@ -0,0 +1,55 @@ +{ + "resourceType": "CarePlan", + "id": "hello-world-patient-view", + "contained": [ + { + "resourceType": "RequestGroup", + "id": "hello-world-patient-view", + "definition": [ + { + "reference": "http://fhir.org/guides/cdc/opioid-cds/PlanDefinition/hello-world-patient-view|1.0.0" + } + ], + "status": "draft", + "intent": "proposal", + "subject": { + "reference": "Patient/helloworld-patient-1" + }, + "context": { + "reference": "Encounter/helloworld-patient-1-encounter-1" + }, + "action": [ + { + "title": "Hello World!", + "description": "The CDS Service is alive and communicating successfully!", + "condition": [ + { + "kind": "start", + "expression": "Main Action Condition Expression Is True" + } + ] + } + ] + } + ], + "definition": [ + { + "reference": 
"http://fhir.org/guides/cdc/opioid-cds/PlanDefinition/hello-world-patient-view|1.0.0" + } + ], + "status": "draft", + "intent": "proposal", + "subject": { + "reference": "Patient/helloworld-patient-1" + }, + "context": { + "reference": "Encounter/helloworld-patient-1-encounter-1" + }, + "activity": [ + { + "reference": { + "reference": "#RequestGroup/hello-world-patient-view" + } + } + ] +} diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-data.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-data.json new file mode 100644 index 00000000000..d8c5f285036 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-data.json @@ -0,0 +1,459 @@ +{ + "resourceType": "Bundle", + "id": "hello-world-patient-data", + "type": "transaction", + "entry": [ + { + "resource": { + "resourceType": "Encounter", + "id": "helloworld-patient-1-encounter-1", + "meta": { + "profile": [ + "http://hl7.org/fhir/us/qicore/StructureDefinition/qicore-encounter" + ] + }, + "status": "finished", + "class": { + "system": "http://terminology.hl7.org/CodeSystem/v3-ActCode", + "code": "AMB", + "display": "ambulatory" + }, + "type": [ + { + "coding": [ + { + "system": "http://snomed.info/sct", + "version": "2020-09", + "code": "185463005", + "display": "Visit out of hours (procedure)" + } + ] + } + ], + "subject": { + "reference": "Patient/helloworld-patient-1" + }, + "period": { + "start": "2020-01-01T10:00:00-07:00", + "end": "2020-01-01T11:00:00-07:00" + } + }, + "request": { + "method": "PUT", + "url": "Encounter/helloworld-patient-1-encounter-1" + } + }, + { + "resource": { + "resourceType": "Patient", + "id": "helloworld-patient-1", + "meta": { + "profile": [ + "http://hl7.org/fhir/us/qicore/StructureDefinition/qicore-patient" + ] + }, + "extension": [ + { + "url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-race", + 
"extension": [ + { + "url": "ombCategory", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2106-3", + "display": "White" + } + }, + { + "url": "ombCategory", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "1002-5", + "display": "American Indian or Alaska Native" + } + }, + { + "url": "ombCategory", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2028-9", + "display": "Asian" + } + }, + { + "url": "detailed", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "1586-7", + "display": "Shoshone" + } + }, + { + "url": "detailed", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2036-2", + "display": "Filipino" + } + }, + { + "url": "text", + "valueString": "Mixed" + } + ] + }, + { + "url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity", + "extension": [ + { + "url": "ombCategory", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2135-2", + "display": "Hispanic or Latino" + } + }, + { + "url": "detailed", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2184-0", + "display": "Dominican" + } + }, + { + "url": "detailed", + "valueCoding": { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2148-5", + "display": "Mexican" + } + }, + { + "url": "text", + "valueString": "Hispanic or Latino" + } + ] + }, + { + "url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-birthsex", + "valueCode": "M" + } + ], + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "MR" + } + ] + }, + "system": "urn:oid:1.2.36.146.595.217.0.1", + "value": "12345", + 
"period": { + "start": "2001-05-06" + }, + "assigner": { + "display": "Acme Healthcare" + } + } + ], + "active": true, + "name": [ + { + "use": "official", + "family": "Doe", + "given": [ + "John" + ] + } + ], + "gender": "male", + "birthDate": "1991-01-01", + "deceasedBoolean": false + }, + "request": { + "method": "PUT", + "url": "Patient/helloworld-patient-1" + } + }, + { + "resource": { + "resourceType": "MeasureReport", + "id": "measurereport-helloworld-patient-1", + "contained": [ + { + "resourceType": "Observation", + "id": "256ebb02-c5d6-4b37-ae5f-66b027e67c53", + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/cqf-measureInfo", + "extension": [ + { + "url": "measure", + "valueUri": "http://content.alphora.com/fhir/dqm/Measure/helloworld" + }, + { + "url": "populationId", + "valueString": "sde-race" + } + ] + } + ], + "status": "final", + "code": { + "text": "sde-race" + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2106-3", + "display": "White" + } + ] + } + }, + { + "resourceType": "Observation", + "id": "54f1aa42-627e-45df-a655-18a88c5f5d6c", + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/cqf-measureInfo", + "extension": [ + { + "url": "measure", + "valueUri": "http://content.alphora.com/fhir/dqm/Measure/helloworld" + }, + { + "url": "populationId", + "valueString": "sde-payer" + } + ] + } + ], + "status": "final", + "code": { + "text": "sde-payer" + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "https://nahdo.org/sopt", + "code": "31", + "display": "Department of Defense" + } + ] + } + }, + { + "resourceType": "Observation", + "id": "2499fe18-2a97-4ad8-8d5f-154d94f5f4e9", + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/cqf-measureInfo", + "extension": [ + { + "url": "measure", + "valueUri": "http://content.alphora.com/fhir/dqm/Measure/helloworld" + }, + { + "url": "populationId", + 
"valueString": "sde-ethnicity" + } + ] + } + ], + "status": "final", + "code": { + "text": "sde-ethnicity" + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/PHRaceAndEthnicityCDC", + "code": "2135-2", + "display": "Hispanic or Latino" + } + ] + } + }, + { + "resourceType": "Observation", + "id": "6f2b2210-564c-4614-bbe7-0fe5de354b99", + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/cqf-measureInfo", + "extension": [ + { + "url": "measure", + "valueUri": "http://content.alphora.com/fhir/dqm/Measure/helloworld" + }, + { + "url": "populationId", + "valueString": "sde-sex" + } + ] + } + ], + "status": "final", + "code": { + "text": "sde-sex" + }, + "valueCodeableConcept": { + "coding": [ + { + "code": "M" + } + ] + } + } + ], + "status": "complete", + "type": "individual", + "measure": { + "reference": "http://content.alphora.com/fhir/dqm/Measure/helloworld" + }, + "patient": { + "reference": "Patient/helloworld-patient-1" + }, + "period": { + "start": "2020-01-01T00:00:00-07:00", + "end": "2020-12-31T00:00:00-07:00" + }, + "group": [ + { + "id": "group-1", + "population": [ + { + "code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "initial-population", + "display": "Initial Population" + } + ] + }, + "count": 0 + }, + { + "code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "numerator", + "display": "Numerator" + } + ] + }, + "count": 0 + }, + { + "code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "denominator", + "display": "Denominator" + } + ] + }, + "count": 0 + } + ] + }, + { + "id": "group-2", + "population": [ + { + "code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "initial-population", + "display": "Initial Population" + } + ] + }, + "count": 0 + }, + { + 
"code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "numerator", + "display": "Numerator" + } + ] + }, + "count": 0 + }, + { + "code": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/measure-population", + "code": "denominator", + "display": "Denominator" + } + ] + }, + "count": 0 + } + ] + } + ] + }, + "request": { + "method": "PUT", + "url": "MeasureReport/measurereport-helloworld-patient-1" + } + }, + { + "resource": { + "resourceType": "Coverage", + "id": "helloworld-patient-1-coverage-1", + "meta": { + "profile": [ + "http://hl7.org/fhir/us/qicore/StructureDefinition/qicore-coverage" + ] + }, + "status": "active", + "type": { + "coding": [ + { + "system": "https://nahdo.org/sopt", + "version": "9.2", + "code": "31", + "display": "Department of Defense" + } + ] + }, + "policyHolder": { + "reference": "Patient/helloworld-patient-1" + }, + "beneficiary": { + "reference": "Patient/helloworld-patient-1" + }, + "period": { + "start": "2020-01-01T00:00:00-07:00", + "end": "2021-01-01T00:00:00-07:00" + }, + "payor": [ + { + "reference": "Patient/helloworld-patient-1" + } + ] + }, + "request": { + "method": "PUT", + "url": "Coverage/helloworld-patient-1-coverage-1" + } + } + ] +} diff --git a/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-view-bundle.json b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-view-bundle.json new file mode 100644 index 00000000000..0847f32fa59 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/resources/ca/uhn/fhir/cr/dstu3/hello-world/hello-world-patient-view-bundle.json @@ -0,0 +1,214 @@ +{ + "resourceType": "Bundle", + "id": "hello-world-patient-view-bundle", + "type": "transaction", + "entry": [ + { + "resource": { + "resourceType": "PlanDefinition", + "id": "hello-world-patient-view", + "meta": { + "profile": [ + 
"http://hl7.org/fhir/uv/cpg/StructureDefinition/cpg-recommendationdefinition" + ] + }, + "url": "http://fhir.org/guides/cdc/opioid-cds/PlanDefinition/hello-world-patient-view", + "identifier": [ + { + "use": "official", + "value": "helloworld-patient-view-sample" + } + ], + "version": "1.0.0", + "name": "HelloWorldPatientView", + "title": "Hello World (patient-view)", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/plan-definition-type", + "code": "eca-rule", + "display": "ECA Rule" + } + ] + }, + "status": "draft", + "experimental": true, + "date": "2021-05-26T00:00:00-08:00", + "publisher": "Alphora", + "description": "This PlanDefinition defines a simple Hello World recommendation that triggers on patient-view.", + "useContext": [ + { + "code": { + "system": "http://terminology.hl7.org/CodeSystem/usage-context-type", + "version": "3.2.0", + "code": "focus", + "display": "Clinical Focus" + } + } + ], + "jurisdiction": [ + { + "coding": [ + { + "system": "http://hl7.org/fhir/ValueSet/iso3166-1-3", + "version": "3.2.0", + "code": "USA", + "display": "United States of America" + } + ] + } + ], + "purpose": "The purpose of this is to test the system to make sure we have complete end-to-end functionality", + "usage": "This is to be used in conjunction with a patient-facing FHIR application.", + "copyright": "© CDC 2016+.", + "library": [ + { + "reference": "http://fhir.org/guides/cdc/opioid-cds/Library/HelloWorld" + } + ], + "action": [ + { + "title": "Hello World!", + "description": "A simple Hello World (patient-view) recommendation", + "triggerDefinition": [ + { + "type": "named-event", + "eventName": "patient-view" + } + ], + "condition": [ + { + "kind": "start", + "description": "Whether or not a Hello World! 
card should be returned", + "language": "text/cql.identifier", + "expression": "Main Action Condition Expression Is True" + } + ], + "dynamicValue": [ + { + "path": "action.title", + "language": "text/cql.identifier", + "expression": "Get Title" + }, + { + "path": "action.description", + "language": "text/cql.identifier", + "expression": "Get Description" + } + ] + } + ] + }, + "request": { + "method": "PUT", + "url": "PlanDefinition/hello-world-patient-view" + } + }, + { + "resource": { + "resourceType": "Library", + "id": "HelloWorld", + "extension": [ + { + "url": "http://hl7.org/fhir/us/cqfmeasures/StructureDefinition/cqfm-softwaresystem", + "valueReference": { + "reference": "Device/cqf-tooling" + } + } + ], + "url": "http://fhir.org/guides/cdc/opioid-cds/Library/HelloWorld", + "version": "1.0.0", + "name": "HelloWorld", + "relatedArtifact": [ + { + "type": "depends-on", + "display": "FHIR model information", + "resource": { + "reference": "http://fhir.org/guides/cqf/common/Library/FHIR-ModelInfo|3.2.0" + } + } + ], + "parameter": [ + { + "name": "Patient", + "use": "out", + "min": 0, + "max": "1", + "type": "Patient" + }, + { + "name": "Info", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + }, + { + "name": "Warning", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + }, + { + "name": "Critical", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + }, + { + "name": "Main Action Condition Expression Is True", + "use": "out", + "min": 0, + "max": "1", + "type": "boolean" + }, + { + "name": "Get Title", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + }, + { + "name": "Get Description", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + }, + { + "name": "Get Indicator", + "use": "out", + "min": 0, + "max": "1", + "type": "string" + } + ], + "dataRequirement": [ + { + "type": "Patient", + "profile": [ + "http://hl7.org/fhir/StructureDefinition/Patient" + ] + } + ], + "content": [ + { + "contentType": 
"text/cql", + "data": "bGlicmFyeSBIZWxsb1dvcmxkIHZlcnNpb24gJzEuMC4wJwoKdXNpbmcgRkhJUiB2ZXJzaW9uICczLjIuMCcKCi8qIGluY2x1ZGUgRkhJUkhlbHBlcnMgdmVyc2lvbiAnMy4yLjAnKi8KCmNvbnRleHQgUGF0aWVudAoKZGVmaW5lICJJbmZvIjoKICAnaW5mbycKCmRlZmluZSAiV2FybmluZyI6CiAgJ3dhcm5pbmcnCgpkZWZpbmUgIkNyaXRpY2FsIjoKICAnY3JpdGljYWwnCgpkZWZpbmUgIk1haW4gQWN0aW9uIENvbmRpdGlvbiBFeHByZXNzaW9uIElzIFRydWUiOgogIHRydWUKCmRlZmluZSAiR2V0IFRpdGxlIjoKICAnSGVsbG8gV29ybGQhJwoKZGVmaW5lICJHZXQgRGVzY3JpcHRpb24iOgogICdUaGUgQ0RTIFNlcnZpY2UgaXMgYWxpdmUgYW5kIGNvbW11bmljYXRpbmcgc3VjY2Vzc2Z1bGx5IScKCmRlZmluZSAiR2V0IEluZGljYXRvciI6CiAgJ2luZm8nCg==" + }, + { + "contentType": "application/elm+xml", + "data": "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPGxpYnJhcnkgeG1sbnM9InVybjpobDctb3JnOmVsbTpyMSIgeG1sbnM6dD0idXJuOmhsNy1vcmc6ZWxtLXR5cGVzOnIxIiB4bWxuczp4c2k9Imh0dHA6Ly93d3cudzMub3JnLzIwMDEvWE1MU2NoZW1hLWluc3RhbmNlIiB4bWxuczp4c2Q9Imh0dHA6Ly93d3cudzMub3JnLzIwMDEvWE1MU2NoZW1hIiB4bWxuczpmaGlyPSJodHRwOi8vaGw3Lm9yZy9maGlyIiB4bWxuczpxZG00Mz0idXJuOmhlYWx0aGl0LWdvdjpxZG06djRfMyIgeG1sbnM6cWRtNTM9InVybjpoZWFsdGhpdC1nb3Y6cWRtOnY1XzMiIHhtbG5zOmE9InVybjpobDctb3JnOmNxbC1hbm5vdGF0aW9uczpyMSI+CiAgIDxhbm5vdGF0aW9uIHRyYW5zbGF0b3JWZXJzaW9uPSIyLjIuMC1TTkFQU0hPVCIgdHJhbnNsYXRvck9wdGlvbnM9IiIgeHNpOnR5cGU9ImE6Q3FsVG9FbG1JbmZvIi8+CiAgIDxpZGVudGlmaWVyIGlkPSJIZWxsb1dvcmxkIiB2ZXJzaW9uPSIxLjAuMCIvPgogICA8c2NoZW1hSWRlbnRpZmllciBpZD0idXJuOmhsNy1vcmc6ZWxtIiB2ZXJzaW9uPSJyMSIvPgogICA8dXNpbmdzPgogICAgICA8ZGVmIGxvY2FsSWRlbnRpZmllcj0iU3lzdGVtIiB1cmk9InVybjpobDctb3JnOmVsbS10eXBlczpyMSIvPgogICAgICA8ZGVmIGxvY2FsSWRlbnRpZmllcj0iRkhJUiIgdXJpPSJodHRwOi8vaGw3Lm9yZy9maGlyIiB2ZXJzaW9uPSIzLjIuMCIvPgogICA8L3VzaW5ncz4KICAgPGNvbnRleHRzPgogICAgICA8ZGVmIG5hbWU9IlBhdGllbnQiLz4KICAgPC9jb250ZXh0cz4KICAgPHN0YXRlbWVudHM+CiAgICAgIDxkZWYgbmFtZT0iUGF0aWVudCIgY29udGV4dD0iUGF0aWVudCI+CiAgICAgICAgIDxleHByZXNzaW9uIHhzaTp0eXBlPSJTaW5nbGV0b25Gcm9tIj4KICAgICAgICAgICAgPG9wZXJhbmQgZGF0YVR5cGU9ImZoaXI6UGF0aWVudCIgeHNpOnR5cGU9IlJldHJpZXZlIi8+CiAgICAgICAgIDwvZXhwcmVzc2lvbj4KICAgICAgPC9
kZWY+CiAgICAgIDxkZWYgbmFtZT0iSW5mbyIgY29udGV4dD0iUGF0aWVudCIgYWNjZXNzTGV2ZWw9IlB1YmxpYyI+CiAgICAgICAgIDxleHByZXNzaW9uIHZhbHVlVHlwZT0idDpTdHJpbmciIHZhbHVlPSJpbmZvIiB4c2k6dHlwZT0iTGl0ZXJhbCIvPgogICAgICA8L2RlZj4KICAgICAgPGRlZiBuYW1lPSJXYXJuaW5nIiBjb250ZXh0PSJQYXRpZW50IiBhY2Nlc3NMZXZlbD0iUHVibGljIj4KICAgICAgICAgPGV4cHJlc3Npb24gdmFsdWVUeXBlPSJ0OlN0cmluZyIgdmFsdWU9Indhcm5pbmciIHhzaTp0eXBlPSJMaXRlcmFsIi8+CiAgICAgIDwvZGVmPgogICAgICA8ZGVmIG5hbWU9IkNyaXRpY2FsIiBjb250ZXh0PSJQYXRpZW50IiBhY2Nlc3NMZXZlbD0iUHVibGljIj4KICAgICAgICAgPGV4cHJlc3Npb24gdmFsdWVUeXBlPSJ0OlN0cmluZyIgdmFsdWU9ImNyaXRpY2FsIiB4c2k6dHlwZT0iTGl0ZXJhbCIvPgogICAgICA8L2RlZj4KICAgICAgPGRlZiBuYW1lPSJNYWluIEFjdGlvbiBDb25kaXRpb24gRXhwcmVzc2lvbiBJcyBUcnVlIiBjb250ZXh0PSJQYXRpZW50IiBhY2Nlc3NMZXZlbD0iUHVibGljIj4KICAgICAgICAgPGV4cHJlc3Npb24gdmFsdWVUeXBlPSJ0OkJvb2xlYW4iIHZhbHVlPSJ0cnVlIiB4c2k6dHlwZT0iTGl0ZXJhbCIvPgogICAgICA8L2RlZj4KICAgICAgPGRlZiBuYW1lPSJHZXQgVGl0bGUiIGNvbnRleHQ9IlBhdGllbnQiIGFjY2Vzc0xldmVsPSJQdWJsaWMiPgogICAgICAgICA8ZXhwcmVzc2lvbiB2YWx1ZVR5cGU9InQ6U3RyaW5nIiB2YWx1ZT0iSGVsbG8gV29ybGQhIiB4c2k6dHlwZT0iTGl0ZXJhbCIvPgogICAgICA8L2RlZj4KICAgICAgPGRlZiBuYW1lPSJHZXQgRGVzY3JpcHRpb24iIGNvbnRleHQ9IlBhdGllbnQiIGFjY2Vzc0xldmVsPSJQdWJsaWMiPgogICAgICAgICA8ZXhwcmVzc2lvbiB2YWx1ZVR5cGU9InQ6U3RyaW5nIiB2YWx1ZT0iVGhlIENEUyBTZXJ2aWNlIGlzIGFsaXZlIGFuZCBjb21tdW5pY2F0aW5nIHN1Y2Nlc3NmdWxseSEiIHhzaTp0eXBlPSJMaXRlcmFsIi8+CiAgICAgIDwvZGVmPgogICAgICA8ZGVmIG5hbWU9IkdldCBJbmRpY2F0b3IiIGNvbnRleHQ9IlBhdGllbnQiIGFjY2Vzc0xldmVsPSJQdWJsaWMiPgogICAgICAgICA8ZXhwcmVzc2lvbiB2YWx1ZVR5cGU9InQ6U3RyaW5nIiB2YWx1ZT0iaW5mbyIgeHNpOnR5cGU9IkxpdGVyYWwiLz4KICAgICAgPC9kZWY+CiAgIDwvc3RhdGVtZW50cz4KPC9saWJyYXJ5Pg==" + } + ] + }, + "request": { + "method": "PUT", + "url": "Library/HelloWorld" + } + } + ] +} diff --git a/pom.xml b/pom.xml index 2ce4f6d190a..a9efbc6be4a 100644 --- a/pom.xml +++ b/pom.xml @@ -1006,7 +1006,7 @@ 1.0.8 - 3.2.0 + 3.3.1 5.4.1 From 80d382d8cdbccfd3a3448bf6fa9b3f2e65df5068 Mon Sep 17 00:00:00 2001 From: TipzCM Date: Mon, 15 Apr 2024 
19:36:27 -0400 Subject: [PATCH 12/26] 5845 add new bulk export hook for parameter massaging (#5846) * added an additional pointcut * spotless * version bump --------- Co-authored-by: leif stawnyczy --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../ca/uhn/fhir/interceptor/api/Pointcut.java | 42 ++++++++++++++ hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-checkstyle/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 2 +- ...added-additional-bulk-export-pointcut.yaml | 11 ++++ hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jpa/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-jpaserver-hfql/pom.xml | 2 +- hapi-fhir-jpaserver-ips/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4/pom.xml | 2 +- .../jpa/bulk/BulkDataExportProviderTest.java | 58 ++++++++++++++++++- hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +- hapi-fhir-jpaserver-test-r5/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-cds-hooks/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../hapi-fhir-caching-api/pom.xml | 2 +- .../hapi-fhir-caching-caffeine/pom.xml | 4 +- .../hapi-fhir-caching-guava/pom.xml | 2 +- .../hapi-fhir-caching-testing/pom.xml | 2 +- hapi-fhir-serviceloaders/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 
+- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-sql-migrate/pom.xml | 2 +- hapi-fhir-storage-batch2-jobs/pom.xml | 2 +- .../jobs/export/BulkDataExportProvider.java | 43 ++++++++++---- .../pom.xml | 2 +- hapi-fhir-storage-batch2/pom.xml | 2 +- hapi-fhir-storage-cr/pom.xml | 2 +- hapi-fhir-storage-mdm/pom.xml | 2 +- hapi-fhir-storage-test-utilities/pom.xml | 2 +- hapi-fhir-storage/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r4b/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r4b/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 2 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 81 files changed, 220 insertions(+), 92 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5845-added-additional-bulk-export-pointcut.yaml diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 5e8b7d27de7..53ce20c4618 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index c12e6e57b77..255e4ffe450 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index fb35d780562..a491b2b05b5 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index 8cb3fe18a3c..8306311f503 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -1080,10 +1080,52 @@ public enum Pointcut implements IPointcut { SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED( void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), + /** + * Storage Hook: + * Invoked when a Bulk Export job is being kicked off, but before any permission checks + * have been done. + * This hook can be used to modify or update parameters as need be before + * authorization/permission checks are done. + *

+ * Hooks may accept the following parameters: + *

+ *
    + *
  • + * ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions - The details of the job being kicked off + *
  • + *
  • + * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the + * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been + * pulled out of the servlet request. Note that the bean + * properties are not all guaranteed to be populated, depending on how early during processing the + * exception occurred. Note that this parameter may be null in contexts where the request is not + * known, such as while processing searches + *
  • + *
  • + * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the + * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been + * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will + * only be populated when operating in a RestfulServer implementation. It is provided as a convenience. + *
  • + *
+ *

+ * Hooks should return void, and can throw exceptions. + *

  • + */ + STORAGE_PRE_INITIATE_BULK_EXPORT( + void.class, + "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), + /** + * Storage Hook: + * Invoked when a Bulk Export job is being kicked off. Hook methods may modify + * the request, or raise an exception to prevent it from being initiated. + * + * This hook is not guaranteed to be called before permission checks, and so + * any implementers should be cautious of changing the options in ways that would + * affect permissions. *

* Hooks may accept the following parameters: *

diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 4da0f9c1d40..b7b350fe12c 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index 08e0b9c2eaf..cfd0c7ab084 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index eed137d374d..b01715cabb8 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index de8fa790b0b..149c8945a0f 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index 4fc7c8e427e..d5a7d1adbe2 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index ce20d1ab6b2..a6f01526c67 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 638f1ed7be0..03db163bf02 100644 --- a/hapi-fhir-client/pom.xml +++ 
b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index fca0987ddf5..eb86eb2ffba 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index d90951583dd..476a69d129c 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 52d01b8dfbf..5531051b6c4 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5845-added-additional-bulk-export-pointcut.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5845-added-additional-bulk-export-pointcut.yaml new file mode 100644 index 00000000000..6ee3be1bede --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5845-added-additional-bulk-export-pointcut.yaml @@ -0,0 +1,11 @@ +--- +type: add +issue: 5845 +title: "Added a new pointcut: STORAGE_PRE_INITIATE_BULK_EXPORT. + This pointcut is meant to be called explicitly before + STORAGE_INITIATE_BULK_EXPORT, so that parameter + manipulation that needs to be done before permission + checks (currently done using STORAGE_INITIATE_BULK_EXPORT) + can safely be done without risk of affecting permission checks/ + authorization. 
+" diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index e7c060a760f..a58f0810b18 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index e00c808a8c9..ad7d3a76c82 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index a530d5d127d..dbd315186f3 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 3e92c57b452..9314c0af2ad 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index 17c3a08aa50..2be23223a1c 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 99bed5fc6d0..edb3b7bfbd6 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index 
eb66d27cfac..937f7b155e6 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 2a8ed1e5e9b..e7b26745ade 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 23b8a5f67c5..ec13e7a2b20 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 1cbb7748009..0637fd3251b 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index cc08fd09695..fa48039813e 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index 1ced5f66cf1..b73bfd76afb 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 
7868b2b95a2..c931a7db4bc 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index 9a9709161a5..55d4b2259c0 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 33cc8c258fc..ad2b6dfc33b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -7,6 +7,9 @@ import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; @@ -68,6 +71,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; import static org.hamcrest.MatcherAssert.assertThat; @@ -110,6 +114,10 @@ public class BulkDataExportProviderTest { IFhirResourceDao myFhirResourceDao; @Mock IJobCoordinator myJobCoordinator; + + @Mock + private IInterceptorBroadcaster 
myInterceptorBroadcaster; + @InjectMocks private BulkDataExportProvider myProvider; @RegisterExtension @@ -1037,6 +1045,53 @@ public class BulkDataExportProviderTest { } } + + + @ParameterizedTest + @ValueSource(strings = { + "$export", + "Patient/$export", + "Patient//$export", + "Group//$export" + }) + public void testBulkDataExport_hookOrder_isMaintained(String theUrl) throws IOException { + // setup + String url = String.format( + "http://localhost:%s/%s", + myServer.getPort(), + theUrl.replaceAll("", "1")); + AtomicBoolean preInitiateCalled = new AtomicBoolean(false); + AtomicBoolean initiateCalled = new AtomicBoolean(false); + + // when + when(myInterceptorBroadcaster.callHooks(eq(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT), any(HookParams.class))) + .thenAnswer((args) -> { + assertFalse(initiateCalled.get()); + assertFalse(preInitiateCalled.getAndSet(true)); + return true; + }); + when(myInterceptorBroadcaster.callHooks(eq(Pointcut.STORAGE_INITIATE_BULK_EXPORT), any(HookParams.class))) + .thenAnswer((args) -> { + assertTrue(preInitiateCalled.get()); + assertFalse(initiateCalled.getAndSet(true)); + return true; + }); + when(myJobCoordinator.startInstance(isNotNull(), any())) + .thenReturn(createJobStartResponse()); + + // test + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + assertEquals(202, response.getStatusLine().getStatusCode()); + } + + // verify + assertTrue(preInitiateCalled.get()); + assertTrue(initiateCalled.get()); + } + @Test public void testGetBulkExport_outputFormat_FhirNdJson_inHeader() throws IOException { // when @@ -1232,7 +1287,6 @@ public class BulkDataExportProviderTest { assertEquals(403, response.getStatusLine().getStatusCode()); assertEquals("Forbidden", response.getStatusLine().getReasonPhrase()); } - } static Stream paramsProvider() { @@ -1261,6 +1315,4 
@@ public class BulkDataExportProviderTest { } } - - } diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index 9013e0c6b72..5fe6f234e0b 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index f91d4266b1a..87b7ba6da9a 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 72fcdec2607..8a60ec85637 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 434fa5b7fd1..8034cc4e83f 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 4e2bbda8f5e..71e76447df6 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 5d85b54408b..29a8318010f 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 6371abe0514..7079aab6795 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 03bba016e4a..4e8b692ad02 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index 41d0b9e5491..58fb8baf837 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 47a646b0196..d74d649f38e 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 1bb5308eec9..4f9fdfb83b6 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml 
b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index 615bcaede67..de69a46093b 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index 4e9f671c79c..fa62b23b455 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 5514a27054c..ca4923acca7 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 638c3933040..97d0fef42d6 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index d3e81bb97d3..d43c0903312 100644 --- 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index 6ec3497c477..f50c94f7006 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 133fa52cfd8..756142f7b57 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index e23fe9696d2..c87a70f6b22 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 25eee371362..4bdab3c0201 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git 
a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index e10a99c7db0..179e3f7323d 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 814d926048b..15cb3b2e512 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java index 05f5a95dbcc..da6cf525033 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java @@ -161,17 +161,38 @@ public class BulkDataExportProvider { } private void startJob(ServletRequestDetails theRequestDetails, BulkExportJobParameters theOptions) { + // parameter massaging + expandParameters(theRequestDetails, theOptions); + // permission check - HookParams params = (new HookParams()) + HookParams initiateBulkExportHookParams = (new HookParams()) .add(BulkExportJobParameters.class, theOptions) .add(RequestDetails.class, theRequestDetails) .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); CompositeInterceptorBroadcaster.doCallHooks( - this.myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params); + this.myInterceptorBroadcaster, + theRequestDetails, + Pointcut.STORAGE_INITIATE_BULK_EXPORT, + initiateBulkExportHookParams); // get cache boolean boolean useCache = shouldUseCache(theRequestDetails); + // start job + 
JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); + startRequest.setParameters(theOptions); + startRequest.setUseCache(useCache); + startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT); + Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest); + + writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId()); + } + + /** + * This method changes any parameters (limiting the _type parameter, for instance) + * so that later steps in the export do not have to handle them. + */ + private void expandParameters(ServletRequestDetails theRequestDetails, BulkExportJobParameters theOptions) { // Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and // is needed for the report. theOptions.setOriginalRequestUrl(theRequestDetails.getCompleteUrl()); @@ -190,14 +211,16 @@ public class BulkDataExportProvider { myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId); theOptions.setPartitionId(partitionId); - // start job - JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setParameters(theOptions); - startRequest.setUseCache(useCache); - startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT); - Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest); - - writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId()); + // call hook so any other parameter manipulation can be done + HookParams preInitiateBulkExportHookParams = new HookParams(); + preInitiateBulkExportHookParams.add(BulkExportJobParameters.class, theOptions); + preInitiateBulkExportHookParams.add(RequestDetails.class, theRequestDetails); + preInitiateBulkExportHookParams.addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + CompositeInterceptorBroadcaster.doCallHooks( + 
myInterceptorBroadcaster, + theRequestDetails, + Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT, + preInitiateBulkExportHookParams); } private boolean shouldUseCache(ServletRequestDetails theRequestDetails) { diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index 1762a3dbe65..c54f3431689 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 7890ad1bfa3..d6d96153e10 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 30a4b7d9731..d2744f52f89 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index 21684ec422d..6762591a2b9 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index ba4f65c01e5..e62f8c7e2bf 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 7090925bf5f..64d3e7c6c2e 100644 --- a/hapi-fhir-storage/pom.xml +++ 
b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index 146924739b1..c93c2bdb158 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 76dadd587ec..64485e7520e 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index cfddebcbaae..eaf3612ccd0 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 3ec00da7fc7..cd303bde32e 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 55a7aeba956..f6ebdd3c94b 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index 73801c099d6..59edff2a9d6 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ 
ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 67862c00146..813116ef724 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index eb4c71307da..181461ece57 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 02489770f91..5caacf907a9 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 22142383e08..158075f8d8d 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index 27b3b119848..a7114fe0883 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index a33191a05ec..36197381a90 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml 
@@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 44b6a5f1805..d283ed7ab1d 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index d58235a44b1..d912140ab61 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 3bde201665c..b030ccb250b 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index b1432477175..375d4769355 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 368e8aaf6a9..6845e456f5c 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 99a2b4d97a8..4ade7bb82fd 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../pom.xml diff --git 
a/pom.xml b/pom.xml index a9efbc6be4a..7f8822aceb7 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 890e8f28ed4..a7cd1eba2a8 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 83b846622ad..aa7c4d23be7 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index a4d48372ff5..25a2406ef92 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.8-SNAPSHOT + 7.1.9-SNAPSHOT ../../pom.xml From a90ccde71cbf667ac86d6780c6d9eafdcc3b9db5 Mon Sep 17 00:00:00 2001 From: jmarchionatto <60409882+jmarchionatto@users.noreply.github.com> Date: Tue, 16 Apr 2024 13:51:50 -0400 Subject: [PATCH 13/26] Base test classes enhancements for gateway test configuration flexibility (#5821) * Allow tests to set paging provider by server * Allow tests to set target server port range --------- Co-authored-by: juan.marchionatto --- .../test/utilities/BaseRestServerHelper.java | 19 +++++++--- .../test/utilities/RestServerR4Helper.java | 37 ++++++++++--------- 2 files changed, 33 insertions(+), 23 deletions(-) diff --git 
a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/BaseRestServerHelper.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/BaseRestServerHelper.java index 17fca20e8d7..fb84a19d587 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/BaseRestServerHelper.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/BaseRestServerHelper.java @@ -28,24 +28,25 @@ import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.IServerAddressStrategy; import ca.uhn.fhir.tls.KeyStoreType; +import jakarta.servlet.Servlet; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SslConnectionFactory; -import org.eclipse.jetty.ee10.servlet.ServletContextHandler; -import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.junit.jupiter.api.extension.RegisterExtension; -import jakarta.servlet.Servlet; import java.security.KeyStore; import java.util.List; public abstract class BaseRestServerHelper { + private static int myFirstTargetPort = -1; private final String SERVER_KEYSTORE_PATH = "/tls/server-keystore.p12"; private final String SERVER_TRUSTSTORE_PATH = "/tls/server-truststore.p12"; @@ -75,9 +76,11 @@ public abstract class BaseRestServerHelper { } protected void startServer(Servlet theServlet) throws Exception { - myListenerServer = new Server(0); + + int port = myFirstTargetPort == -1 ? 
0 : myFirstTargetPort++; + myListenerServer = new Server(port); - myFhirContext.getRestfulClientFactory().setSocketTimeout(120000); + myFhirContext.getRestfulClientFactory().setSocketTimeout(120_000); ServletContextHandler proxyHandler = new ServletContextHandler(); proxyHandler.setContextPath("/"); @@ -119,7 +122,7 @@ public abstract class BaseRestServerHelper { myHttpsListenerPort = ((ServerConnector)myListenerServer.getConnectors()[1]).getLocalPort(); } - private SslContextFactory.Server getSslContextFactory() throws Exception{ + private SslContextFactory.Server getSslContextFactory() { try { SslContextFactory.Server sslContextFactory = new SslContextFactory.Server(); @@ -199,5 +202,9 @@ public abstract class BaseRestServerHelper { setServerAddressStrategy(strategy); } + public static void setMyFirstTargetPort(int theMyFirstTargetPort) { + myFirstTargetPort = theMyFirstTargetPort; + } + protected abstract void setServerAddressStrategy(IServerAddressStrategy theServerAddressStrategy); } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RestServerR4Helper.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RestServerR4Helper.java index 05db01c917e..37f8aa2daa4 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RestServerR4Helper.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RestServerR4Helper.java @@ -28,13 +28,16 @@ import ca.uhn.fhir.rest.api.PagingHttpMethodEnum; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider; +import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.IServerAddressStrategy; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider; +import 
jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.eclipse.jetty.ee10.servlet.ServletApiRequest; -import org.eclipse.jetty.server.Request; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -48,9 +51,6 @@ import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; -import jakarta.servlet.ServletException; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -61,7 +61,8 @@ import java.util.Map; import java.util.stream.Collectors; public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEachCallback, AfterEachCallback { - protected final MyRestfulServer myRestServer; + private final MyRestfulServer myRestServer; + private static IPagingProvider myPagingProvider = new FifoMemoryPagingProvider(20); public RestServerR4Helper() { this(false, false); @@ -216,6 +217,10 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa myRestServer.setConceptMapResourceProvider(theResourceProvider); } + public void setPagingProvider(IPagingProvider thePagingProvider) { + myPagingProvider = thePagingProvider; + } + @Override public IIdType createPatientWithId(String theId) { Patient patient = new Patient(); @@ -331,8 +336,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa public void clearCounts() { for (IResourceProvider next : getResourceProviders()) { - if (next instanceof HashMapResourceProvider) { - HashMapResourceProvider provider = (HashMapResourceProvider) next; + if (next instanceof HashMapResourceProvider provider) { provider.clearCounts(); } } @@ -372,8 
+376,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa public void clearDataAndCounts() { for (IResourceProvider next : getResourceProviders()) { - if (next instanceof HashMapResourceProvider) { - HashMapResourceProvider provider = (HashMapResourceProvider) next; + if (next instanceof HashMapResourceProvider provider) { provider.clear(); } } @@ -385,7 +388,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa } public void setObservationResourceProvider(HashMapResourceProvider theResourceProvider) { - myObservationResourceProvider.getStoredResources().forEach(o -> theResourceProvider.store(o)); + myObservationResourceProvider.getStoredResources().forEach(theResourceProvider::store); unregisterProvider(myObservationResourceProvider); registerProvider(theResourceProvider); @@ -401,7 +404,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa } public void setConceptMapResourceProvider(HashMapResourceProvider theResourceProvider) { - myConceptMapResourceProvider.getStoredResources().forEach(c -> theResourceProvider.store(c)); + myConceptMapResourceProvider.getStoredResources().forEach(theResourceProvider::store); unregisterProvider(myConceptMapResourceProvider); registerProvider(theResourceProvider); @@ -417,23 +420,23 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa super.initialize(); FhirContext fhirContext = getFhirContext(); - myPatientResourceProvider = new MyHashMapResourceProvider(fhirContext, Patient.class); + myPatientResourceProvider = new MyHashMapResourceProvider<>(fhirContext, Patient.class); registerProvider(myPatientResourceProvider); - myObservationResourceProvider = new MyHashMapResourceProvider(fhirContext, Observation.class); + myObservationResourceProvider = new MyHashMapResourceProvider<>(fhirContext, Observation.class); registerProvider(myObservationResourceProvider); - myOrganizationResourceProvider = new 
MyHashMapResourceProvider(fhirContext, Organization.class); + myOrganizationResourceProvider = new MyHashMapResourceProvider<>(fhirContext, Organization.class); registerProvider(myOrganizationResourceProvider); - myConceptMapResourceProvider = new MyHashMapResourceProvider(fhirContext, ConceptMap.class); + myConceptMapResourceProvider = new MyHashMapResourceProvider<>(fhirContext, ConceptMap.class); registerProvider(myConceptMapResourceProvider); myPlainProvider = new RestServerDstu3Helper.MyPlainProvider(myInitialTransactionLatchEnabled); registerProvider(myPlainProvider); - setPagingProvider(new FifoMemoryPagingProvider(20)); + setPagingProvider(myPagingProvider); } public class MyHashMapResourceProvider extends HashMapResourceProvider { - public MyHashMapResourceProvider(FhirContext theContext, Class theType) { + public MyHashMapResourceProvider(FhirContext theContext, Class theType) { super(theContext, theType); } From e400db5fb8f80ec4890bb8101fc695231de318d9 Mon Sep 17 00:00:00 2001 From: Justin McKelvy <60718638+Capt-Mac@users.noreply.github.com> Date: Thu, 18 Apr 2024 16:34:26 -0600 Subject: [PATCH 14/26] collect-data and data-requirements operations and tests (#5854) * collect-data and data-requirements operations and tests * remove unused imports * additional test coverage * move assertions out of method to pass static code analysis * camelCase test names * remove underscore on test methods * requested edits to changelog * version bump to 7.1.10-SNAPSHOT * condense unit tests to prevent pipeline timeout * method camel case naming --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-checkstyle/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- 
hapi-fhir-docs/pom.xml | 2 +- .../7_2_0/5850-migrate-cr-operations.yaml | 6 + hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jpa/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-jpaserver-hfql/pom.xml | 2 +- hapi-fhir-jpaserver-ips/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +- hapi-fhir-jpaserver-test-r5/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-cds-hooks/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../server/provider/ProviderConstants.java | 3 +- .../hapi-fhir-caching-api/pom.xml | 2 +- .../hapi-fhir-caching-caffeine/pom.xml | 4 +- .../hapi-fhir-caching-guava/pom.xml | 2 +- .../hapi-fhir-caching-testing/pom.xml | 2 +- hapi-fhir-serviceloaders/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-sql-migrate/pom.xml | 2 +- hapi-fhir-storage-batch2-jobs/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-storage-batch2/pom.xml | 2 +- hapi-fhir-storage-cr/pom.xml | 2 +- .../ca/uhn/fhir/cr/config/r4/CrR4Config.java | 32 ++- .../cr/r4/ICollectDataServiceFactory.java | 9 + .../r4/IDataRequirementsServiceFactory.java | 9 + .../r4/cpg/CqlExecutionOperationProvider.java | 4 +- .../measure/CollectDataOperationProvider.java | 56 +++++ .../DataRequirementsOperationProvider.java | 42 ++++ .../ca/uhn/fhir/cr/r4/CollectDataTest.java | 73 +++++++ 
.../fhir/cr/r4/CpgOperationProviderTest.java | 61 +++--- .../uhn/fhir/cr/r4/DataRequirementsTest.java | 49 +++++ .../fhir/cr/r4/HapiFhirRepositoryR4Test.java | 147 +++++++------- .../cr/r4/MeasureOperationProviderTest.java | 191 +++++++++--------- .../ca/uhn/fhir/cr/r4/TestCrR4Config.java | 2 - ...ColorectalCancerScreeningsFHIR-bundle.json | 39 +++- hapi-fhir-storage-mdm/pom.xml | 2 +- hapi-fhir-storage-test-utilities/pom.xml | 2 +- hapi-fhir-storage/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r4b/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r4b/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 2 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 4 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 92 files changed, 598 insertions(+), 285 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5850-migrate-cr-operations.yaml create mode 100644 hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java create mode 100644 hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java create mode 100644 hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java create mode 100644 hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java create mode 100644 
hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CollectDataTest.java create mode 100644 hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/DataRequirementsTest.java diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 53ce20c4618..878f8e86233 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 255e4ffe450..e60ddcaff71 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index a491b2b05b5..1db4476b4d3 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index b7b350fe12c..e816820a5b6 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index cfd0c7ab084..5c2d84271c1 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index b01715cabb8..259e452b291 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../hapi-deployable-pom/pom.xml 
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 149c8945a0f..0a92269dd80 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index d5a7d1adbe2..f4735a64673 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index a6f01526c67..42dca0baa55 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 03db163bf02..3b36b9a78cc 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index eb86eb2ffba..0fb7336aa4e 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 476a69d129c..bc58e77989b 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 5531051b6c4..f06c114a1e0 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5850-migrate-cr-operations.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5850-migrate-cr-operations.yaml new file mode 100644 index 00000000000..37b8d1133e5 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5850-migrate-cr-operations.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 5850 +title: "Add additional Clinical Reasoning operations $collect-data and $data-requirements to HAPI-FHIR to expand capability +for clinical reasoning module users. These operations will assist users in identifying data requirements for evaluating a measure, and what +data was used for a measure evaluation" diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index a58f0810b18..062612c14f4 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index ad7d3a76c82..8abee5044e7 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index dbd315186f3..90fe5c84f17 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 9314c0af2ad..6e69cafa0a1 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml 
b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index 2be23223a1c..c7d8789b66b 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index edb3b7bfbd6..21658470076 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index 937f7b155e6..60ecf7dda97 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index e7b26745ade..9da334e1db8 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index ec13e7a2b20..75996628b21 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 0637fd3251b..3e759bdd325 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml 
b/hapi-fhir-jpaserver-subscription/pom.xml index fa48039813e..c7372d53fe0 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index b73bfd76afb..e983f23ef78 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index c931a7db4bc..f7109d2f830 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index 55d4b2259c0..91490d9aa59 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index 5fe6f234e0b..ca1a4a08fbd 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index 87b7ba6da9a..ca814fc3407 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 8a60ec85637..0592981d2ce 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 8034cc4e83f..bb49ce2cff0 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 71e76447df6..5de3625c9fa 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 29a8318010f..33c0d53b3c7 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 7079aab6795..8ebb87af6b9 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 4e8b692ad02..539c930b36f 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java index 9ab2132e1f8..be33d2829b5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java @@ -123,7 +123,8 @@ public class ProviderConstants { public static final String CR_OPERATION_EXTRACT = "$extract"; public static final String CR_OPERATION_PACKAGE = "$package"; public static final String CR_OPERATION_QUESTIONNAIRE = "$questionnaire"; - + public static final String CR_OPERATION_COLLECTDATA = "$collect-data"; + public static final String CR_OPERATION_DATAREQUIREMENTS = "$data-requirements"; /** * Operation name for the $meta operation */ diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index 58fb8baf837..fd86c76c918 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index d74d649f38e..6f3ad9aebf0 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 4f9fdfb83b6..083fd428a66 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders 
ca.uhn.hapi.fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index de69a46093b..d76aaae39a5 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index fa62b23b455..d8b0ef67672 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index ca4923acca7..a36a8106d02 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 97d0fef42d6..5ab1e07f34c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index d43c0903312..89e66d269d4 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index f50c94f7006..9ab88bfcc86 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 756142f7b57..a3c882ef6b6 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index c87a70f6b22..65d782703f7 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 4bdab3c0201..7c12175c3ba 100644 --- 
a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index 179e3f7323d..dee88ccf8b7 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 15cb3b2e512..d5db154e318 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index c54f3431689..c06a923d4dd 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index d6d96153e10..91f34741c1f 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index d2744f52f89..49893d00f35 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java index f08743d7909..19466eba19b 100644 --- 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java @@ -26,13 +26,17 @@ import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.cr.config.RepositoryConfig; import ca.uhn.fhir.cr.r4.ICareGapsServiceFactory; +import ca.uhn.fhir.cr.r4.ICollectDataServiceFactory; import ca.uhn.fhir.cr.r4.ICqlExecutionServiceFactory; +import ca.uhn.fhir.cr.r4.IDataRequirementsServiceFactory; import ca.uhn.fhir.cr.r4.ILibraryEvaluationServiceFactory; import ca.uhn.fhir.cr.r4.IMeasureServiceFactory; import ca.uhn.fhir.cr.r4.ISubmitDataProcessorFactory; import ca.uhn.fhir.cr.r4.cpg.CqlExecutionOperationProvider; import ca.uhn.fhir.cr.r4.cpg.LibraryEvaluationOperationProvider; import ca.uhn.fhir.cr.r4.measure.CareGapsOperationProvider; +import ca.uhn.fhir.cr.r4.measure.CollectDataOperationProvider; +import ca.uhn.fhir.cr.r4.measure.DataRequirementsOperationProvider; import ca.uhn.fhir.cr.r4.measure.MeasureOperationsProvider; import ca.uhn.fhir.cr.r4.measure.SubmitDataProvider; import ca.uhn.fhir.rest.server.RestfulServer; @@ -42,6 +46,8 @@ import org.opencds.cqf.fhir.cr.cpg.r4.R4LibraryEvaluationService; import org.opencds.cqf.fhir.cr.measure.CareGapsProperties; import org.opencds.cqf.fhir.cr.measure.MeasureEvaluationOptions; import org.opencds.cqf.fhir.cr.measure.r4.R4CareGapsService; +import org.opencds.cqf.fhir.cr.measure.r4.R4CollectDataService; +import org.opencds.cqf.fhir.cr.measure.r4.R4DataRequirementsService; import org.opencds.cqf.fhir.cr.measure.r4.R4MeasureService; import org.opencds.cqf.fhir.cr.measure.r4.R4SubmitDataService; import org.springframework.beans.factory.annotation.Qualifier; @@ -86,6 +92,28 @@ public class CrR4Config { return new CqlExecutionOperationProvider(); } + @Bean + CollectDataOperationProvider r4CollectDataOperationProvider() { + return new CollectDataOperationProvider(); + } + + @Bean + 
ICollectDataServiceFactory collectDataServiceFactory( + IRepositoryFactory theRepositoryFactory, MeasureEvaluationOptions theMeasureEvaluationOptions) { + return rd -> new R4CollectDataService(theRepositoryFactory.create(rd), theMeasureEvaluationOptions); + } + + @Bean + DataRequirementsOperationProvider r4DataRequirementsOperationProvider() { + return new DataRequirementsOperationProvider(); + } + + @Bean + IDataRequirementsServiceFactory dataRequirementsServiceFactory( + IRepositoryFactory theRepositoryFactory, MeasureEvaluationOptions theMeasureEvaluationOptions) { + return rd -> new R4DataRequirementsService(theRepositoryFactory.create(rd), theMeasureEvaluationOptions); + } + @Bean LibraryEvaluationOperationProvider r4LibraryEvaluationOperationProvider() { return new LibraryEvaluationOperationProvider(); @@ -132,7 +160,9 @@ public class CrR4Config { SubmitDataProvider.class, CareGapsOperationProvider.class, CqlExecutionOperationProvider.class, - LibraryEvaluationOperationProvider.class))); + LibraryEvaluationOperationProvider.class, + CollectDataOperationProvider.class, + DataRequirementsOperationProvider.class))); return new ProviderLoader(theRestfulServer, theApplicationContext, selector); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java new file mode 100644 index 00000000000..fcd883576bf --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java @@ -0,0 +1,9 @@ +package ca.uhn.fhir.cr.r4; + +import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.opencds.cqf.fhir.cr.measure.r4.R4CollectDataService; + +@FunctionalInterface +public interface ICollectDataServiceFactory { + R4CollectDataService create(RequestDetails theRequestDetails); +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java 
b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java new file mode 100644 index 00000000000..f5de302cd97 --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java @@ -0,0 +1,9 @@ +package ca.uhn.fhir.cr.r4; + +import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.opencds.cqf.fhir.cr.measure.r4.R4DataRequirementsService; + +@FunctionalInterface +public interface IDataRequirementsServiceFactory { + R4DataRequirementsService create(RequestDetails theRequestDetails); +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/cpg/CqlExecutionOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/cpg/CqlExecutionOperationProvider.java index ba2d0c36b7f..bf0c6c6f958 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/cpg/CqlExecutionOperationProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/cpg/CqlExecutionOperationProvider.java @@ -113,7 +113,7 @@ public class CqlExecutionOperationProvider { * is a CQL system-defined or FHIR-defined type, the result is returned * as a {@link Parameters} Parameters resource */ - @Operation(name = ProviderConstants.CR_OPERATION_CQL) + @Operation(name = ProviderConstants.CR_OPERATION_CQL, idempotent = true) @Description( shortDefinition = "$cql", value = @@ -145,6 +145,6 @@ public class CqlExecutionOperationProvider { theDataEndpoint, theContentEndpoint, theTerminologyEndpoint, - null); + theContent); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java new file mode 100644 index 00000000000..bdaec6cc8ac --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java @@ -0,0 +1,56 @@ +package ca.uhn.fhir.cr.r4.measure; + +import 
ca.uhn.fhir.cr.r4.ICollectDataServiceFactory; +import ca.uhn.fhir.model.api.annotation.Description; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Measure; +import org.hl7.fhir.r4.model.Parameters; +import org.springframework.beans.factory.annotation.Autowired; + +public class CollectDataOperationProvider { + @Autowired + ICollectDataServiceFactory myR4CollectDataServiceFactory; + /** + * Implements the $collect-data + * operation found in the + * FHIR Clinical + * Reasoning Module. + * + *

+ * Returns a set of parameters with the generated MeasureReport and the + * resources that were used during the Measure evaluation + * + * @param theRequestDetails generally auto-populated by the HAPI server + * framework. + * @param theId the Id of the Measure to sub data for + * @param thePeriodStart The start of the reporting period + * @param thePeriodEnd The end of the reporting period + * @param theSubject the subject to use for the evaluation + * @param thePractitioner the practitioner to use for the evaluation + * + * @return Parameters the parameters containing the MeasureReport and the + * evaluated Resources + */ + @Description( + shortDefinition = "$collect-data", + value = + "Implements the $collect-data operation found in the FHIR Clinical Reasoning Module.") + @Operation(name = ProviderConstants.CR_OPERATION_COLLECTDATA, idempotent = true, type = Measure.class) + public Parameters collectData( + @IdParam IdType theId, + @OperationParam(name = "periodStart") String thePeriodStart, + @OperationParam(name = "periodEnd") String thePeriodEnd, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "practitioner") String thePractitioner, + RequestDetails theRequestDetails) { + return myR4CollectDataServiceFactory + .create(theRequestDetails) + .collectData(theId, thePeriodStart, thePeriodEnd, theSubject, thePractitioner); + } +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java new file mode 100644 index 00000000000..9c801d91d39 --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java @@ -0,0 +1,42 @@ +package ca.uhn.fhir.cr.r4.measure; + +import ca.uhn.fhir.cr.r4.IDataRequirementsServiceFactory; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.annotation.Operation; +import 
ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Library; +import org.hl7.fhir.r4.model.Measure; +import org.springframework.beans.factory.annotation.Autowired; + +public class DataRequirementsOperationProvider { + @Autowired + IDataRequirementsServiceFactory myR4DataRequirementsServiceFactory; + /** + * Implements the $evaluate-measure + * operation found in the + * FHIR Clinical + * Reasoning Module. This implementation aims to be compatible with the CQF + * IG. + * + * @param theId the id of the Measure to evaluate + * @param thePeriodStart The start of the reporting period + * @param thePeriodEnd The end of the reporting period + * @param theRequestDetails The details (such as tenant) of this request. Usually + * autopopulated HAPI. + * @return the calculated Library dataRequirements + */ + @Operation(name = ProviderConstants.CR_OPERATION_DATAREQUIREMENTS, idempotent = true, type = Measure.class) + public Library dataRequirements( + @IdParam IdType theId, + @OperationParam(name = "periodStart") String thePeriodStart, + @OperationParam(name = "periodEnd") String thePeriodEnd, + RequestDetails theRequestDetails) { + return myR4DataRequirementsServiceFactory + .create(theRequestDetails) + .dataRequirements(theId, thePeriodStart, thePeriodEnd); + } +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CollectDataTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CollectDataTest.java new file mode 100644 index 00000000000..f85dcced5f9 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CollectDataTest.java @@ -0,0 +1,73 @@ +package ca.uhn.fhir.cr.r4; + +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; 
+import org.hl7.fhir.r4.model.DateType; +import org.hl7.fhir.r4.model.Parameters; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +@ExtendWith(SpringExtension.class) +public class CollectDataTest extends BaseCrR4TestServer{ + public Parameters runCollectData(String thePeriodStart, String thePeriodEnd, String theMeasureId, String theSubject, String thePractitioner){ + + var parametersEval = new Parameters(); + parametersEval.addParameter("periodStart", new DateType(thePeriodStart)); + parametersEval.addParameter("periodEnd", new DateType(thePeriodEnd)); + parametersEval.addParameter("practitioner", thePractitioner); + parametersEval.addParameter("subject", theSubject); + + + var report = ourClient.operation().onInstance("Measure/" + theMeasureId) + .named(ProviderConstants.CR_OPERATION_COLLECTDATA) + .withParameters(parametersEval) + .returnResourceType(Parameters.class) + .execute(); + + return report; + } + @Test + void testMeasureDataRequirements() { + loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); + var reportAllSubjects = runCollectData("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR", null, null); + Assertions.assertFalse(reportAllSubjects.getParameter().isEmpty()); + // use same loaded bundle for all tests + testCollectDataSubject(); + testCollectDataGroup(); + testCollectDataPractitioner(); + testCollectDataInvalidInterval(); + testCollectDataInvalidMeasure(); + } + + void testCollectDataSubject() { + var report = runCollectData("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR", "Patient/numer-EXM130", null); + Assertions.assertFalse(report.getParameter().isEmpty()); + } + + + void testCollectDataGroup() { + var report = runCollectData("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR", 
"Group/group-EXM130", null); + Assertions.assertFalse(report.getParameter().isEmpty()); + } + + + void testCollectDataPractitioner() { + var report = runCollectData("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR", null, "Practitioner/practitioner-EXM130"); + Assertions.assertFalse(report.getParameter().isEmpty()); + } + + + void testCollectDataInvalidInterval() { + assertThrows(InternalErrorException.class, ()->runCollectData("2020-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR", null, null)); + } + + + void testCollectDataInvalidMeasure() { + assertThrows(ResourceNotFoundException.class, ()->runCollectData("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHI", null, null)); + } +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CpgOperationProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CpgOperationProviderTest.java index 9f56c02ba2f..6f437157c17 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CpgOperationProviderTest.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/CpgOperationProviderTest.java @@ -30,23 +30,37 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class CpgOperationProviderTest extends BaseCrR4TestServer{ @BeforeEach void setup() { - var reqDeets = setupRequestDetails(); - loadResource(Library.class, "SimpleR4Library.json", reqDeets); - loadResource(Patient.class, "SimplePatient.json", reqDeets); - loadResource(Observation.class, "SimpleObservation.json", reqDeets); - loadResource(Condition.class, "SimpleCondition.json", reqDeets); + var requestDetails = setupRequestDetails(); + loadResource(Library.class, "SimpleR4Library.json", requestDetails); + loadResource(Patient.class, "SimplePatient.json", requestDetails); + loadResource(Observation.class, "SimpleObservation.json", requestDetails); + loadResource(Condition.class, "SimpleCondition.json", requestDetails); + } + @Test + void cpgProviderTest(){ + // reuse loaded resources for 
all tests + assertTrue(cqlExecutionProviderTestSimpleDate()); + cqlExecutionProviderTestSimpleArithmetic(); + evaluateLibraryProviderTestLibraryWithSubject(); + evaluateLibraryProviderTestSimpleExpression(); + cqlExecutionProviderTestReferencedLibrary(); + cqlExecutionProviderTestDataBundle(); + cqlExecutionProviderTestDataBundleWithSubject(); + cqlExecutionProviderTestSimpleParameters(); + cqlExecutionProviderTestExpression(); + cqlExecutionProviderTestErrorExpression(); } - @Test - void cqlExecutionProvider_testSimpleDate() { + + private Boolean cqlExecutionProviderTestSimpleDate() { // execute cql expression on date interval Parameters params = parameters(stringPart("expression", "Interval[Today() - 2 years, Today())")); Parameters results = runCqlExecution(params); - assertTrue(results.getParameter("return").getValue() instanceof Period); + return results.getParameter("return").getValue() instanceof Period; } - @Test - void cqlExecutionProvider_testSimpleArithmetic() { + + void cqlExecutionProviderTestSimpleArithmetic() { // execute simple cql expression Parameters params = parameters(stringPart("expression", "5 * 5")); Parameters results = runCqlExecution(params); @@ -54,8 +68,7 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertEquals("25", ((IntegerType) results.getParameter("return").getValue()).asStringValue()); } - @Test - void evaluateLibraryProvider_testLibraryWithSubject() { + void evaluateLibraryProviderTestLibraryWithSubject() { // evaluate library resource for a subject var params = new Parameters(); params.addParameter("subject", new StringType("Patient/SimplePatient")); @@ -71,8 +84,8 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(((BooleanType) report.getParameter("Denominator").getValue()).booleanValue()); } - @Test - void evaluateLibraryProvider_testSimpleExpression() { + + void evaluateLibraryProviderTestSimpleExpression() { // evaluate expression for subject from specified library 
resource var params = new Parameters(); params.addParameter("subject", new StringType("Patient/SimplePatient")); @@ -84,8 +97,7 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(((BooleanType) report.getParameter("Numerator").getValue()).booleanValue()); } - @Test - void cqlExecutionProvider_testReferencedLibrary() { + void cqlExecutionProviderTestReferencedLibrary() { // execute cql expression from referenced library on subject Parameters libraryParameter = parameters( canonicalPart("url", ourClient.getServerBase() + "/Library/SimpleR4Library|0.0.1"), @@ -100,8 +112,7 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(((BooleanType) results.getParameter("return").getValue()).booleanValue()); } - @Test - void cqlExecutionProvider_testDataBundle() { + void cqlExecutionProviderTestDataBundle() { // execute cql expression from library over data from bundle with no subject Parameters libraryParameter = parameters( canonicalPart("url", ourClient.getServerBase() + "/Library/SimpleR4Library"), @@ -118,8 +129,8 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(results.getParameter().get(0).getResource() instanceof Observation); } - @Test - void cqlExecutionProvider_testDataBundleWithSubject() { + + void cqlExecutionProviderTestDataBundleWithSubject() { // execute cql expression from library over data from bundle with subject Parameters libraryParameter = parameters( canonicalPart("url", ourClient.getServerBase() + "/Library/SimpleR4Library"), @@ -135,8 +146,8 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(results.getParameter().get(0).getResource() instanceof Observation); } - @Test - void cqlExecutionProvider_testSimpleParameters() { + + void cqlExecutionProviderTestSimpleParameters() { // execute inline cql date expression with input valuemv Parameters evaluationParams = parameters( datePart("%inputDate", "2019-11-01")); @@ -148,8 +159,7 @@ public 
class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(((BooleanType) results.getParameter("return").getValue()).booleanValue()); } - @Test - void cqlExecutionProvider_testExpression() { + void cqlExecutionProviderTestExpression() { // execute cql expression from referenced library Parameters libraryParameter = parameters( canonicalPart("url", ourClient.getServerBase() + "/Library/SimpleR4Library"), @@ -168,8 +178,7 @@ public class CpgOperationProviderTest extends BaseCrR4TestServer{ assertTrue(((BooleanType) results.getParameter("return").getValue()).booleanValue()); } - @Test - void cqlExecutionProvider_testErrorExpression() { + void cqlExecutionProviderTestErrorExpression() { // execute invalid cql expression Parameters params = parameters(stringPart("expression", "Interval[1,5]")); diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/DataRequirementsTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/DataRequirementsTest.java new file mode 100644 index 00000000000..1baf2ea012d --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/DataRequirementsTest.java @@ -0,0 +1,49 @@ +package ca.uhn.fhir.cr.r4; + +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.r4.model.DateType; +import org.hl7.fhir.r4.model.Library; +import org.hl7.fhir.r4.model.Parameters; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +@ExtendWith(SpringExtension.class) +public class DataRequirementsTest extends BaseCrR4TestServer{ + public Library runDataRequirements(String periodStart, String periodEnd, String measureId){ + + var parametersEval = new 
Parameters(); + parametersEval.addParameter("periodStart", new DateType(periodStart)); + parametersEval.addParameter("periodEnd", new DateType(periodEnd)); + + + var report = ourClient.operation().onInstance("Measure/" + measureId) + .named(ProviderConstants.CR_OPERATION_DATAREQUIREMENTS) + .withParameters(parametersEval) + .returnResourceType(Library.class) + .execute(); + + return report; + + } + @Test + void testMeasureDataRequirements() { + loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); + Assertions.assertFalse(runDataRequirements("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR").getDataRequirement().isEmpty()); + testMeasureDataRequirementsInvalidInterval(); + testMeasureDataRequirementsInvalidMeasure(); + } + void testMeasureDataRequirementsInvalidInterval() { + assertThrows(InternalErrorException.class, ()->runDataRequirements("2020-01-01", "2019-12-31", "ColorectalCancerScreeningsFHIR")); + } + + void testMeasureDataRequirementsInvalidMeasure() { + assertThrows(ResourceNotFoundException.class, ()->runDataRequirements("2019-01-01", "2019-12-31", "ColorectalCancerScreeningsFHI")); + } + +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/HapiFhirRepositoryR4Test.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/HapiFhirRepositoryR4Test.java index dfb9bacb2d9..77e4a36414d 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/HapiFhirRepositoryR4Test.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/HapiFhirRepositoryR4Test.java @@ -5,7 +5,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.*; -import java.util.stream.Collectors; import ca.uhn.fhir.rest.param.*; import ca.uhn.fhir.rest.server.RestfulServer; @@ -15,6 +14,7 @@ import org.hl7.fhir.r4.model.HumanName; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Immunization; import org.hl7.fhir.r4.model.Patient; +import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import ca.uhn.fhir.cr.repo.HapiFhirRepository; @@ -28,13 +28,28 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { private static final String MY_TEST_DATA = "ca/uhn/fhir/cr/r4/immunization/Patients_Encounters_Immunizations_Practitioners.json"; + @BeforeEach + void setup() { + loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); + loadBundle(MY_TEST_DATA); + } @Test - void crudTest() { - var requestDetails = setupRequestDetails(); - //register repo - //regster providers - var repository = new HapiFhirRepository(myDaoRegistry, requestDetails, myRestfulServer); - var result = repository + void repositoryTests(){ + var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); + //run repository tests + transactionReadsImmunizationResources(repository); + repositorySearchForEncounterWithMatchingCode(repository); + canSearchMoreThan50Patients(repository); + canSearchWithPagination(repository); + transactionReadsPatientResources(repository); + transactionReadsEncounterResources(repository); + assertTrue(crudTest(repository)); + } + + + Boolean crudTest(HapiFhirRepository theRepository) { + + var result = theRepository .create(new Patient().addName(new HumanName().setFamily("Test").addGiven("Name1"))); assertEquals(true, result.getCreated()); var patient = (Patient) result.getResource(); @@ -42,64 +57,24 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { assertEquals("Test", patient.getName().get(0).getFamily()); assertEquals(1, patient.getName().get(0).getGiven().size()); patient.getName().get(0).addGiven("Name2"); - repository.update(patient); - var updatedPatient = repository.read(Patient.class, patient.getIdElement()); + theRepository.update(patient); + var updatedPatient = theRepository.read(Patient.class, patient.getIdElement()); assertEquals(2, 
updatedPatient.getName().get(0).getGiven().size()); - repository.delete(Patient.class, patient.getIdElement()); + theRepository.delete(Patient.class, patient.getIdElement()); var ex = assertThrows(Exception.class, - () -> repository.read(Patient.class, new IdType(patient.getIdElement().getIdPart()))); - assertTrue(ex.getMessage().contains("Resource was deleted")); + () -> theRepository.read(Patient.class, new IdType(patient.getIdElement().getIdPart()))); + return ex.getMessage().contains("Resource was deleted"); } - @Test - void canSearchMoreThan50Patients() { - loadBundle(MY_TEST_DATA); - var expectedPatientCount = 63; + void canSearchMoreThan50Patients(HapiFhirRepository theRepository) { + var expectedPatientCount = 65; ourPagingProvider.setMaximumPageSize(100); - var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); // get all patient resources posted - var result = repository.search(Bundle.class, Patient.class, withCountParam(100)); + var result = theRepository.search(Bundle.class, Patient.class, withCountParam(100)); assertEquals(expectedPatientCount, result.getTotal()); // count all resources in result int counter = 0; - for (var e : result.getEntry()) { - counter++; - } - // verify all patient resources captured - assertEquals(expectedPatientCount, counter, - "Patient search results don't match available resources"); - } - - @Test - void canSearchWithPagination() { - loadBundle(MY_TEST_DATA); - - var requestDetails = setupRequestDetails(); - var repository = new HapiFhirRepository(myDaoRegistry, requestDetails, myRestfulServer); - var result = repository.search(Bundle.class, Patient.class, withCountParam(20)); - assertEquals(20, result.getEntry().size()); - var next = result.getLink().get(1); - assertEquals("next", next.getRelation()); - var nextUrl = next.getUrl(); - var nextResult = repository.link(Bundle.class, nextUrl); - assertEquals(20, nextResult.getEntry().size()); - assertEquals(false, - 
result.getEntry().stream().map(e -> e.getResource().getIdPart()).anyMatch( - i -> nextResult.getEntry().stream().map(e -> e.getResource().getIdPart()) - .collect(Collectors.toList()).contains(i))); - } - - @Test - void transactionReadsPatientResources() { - var expectedPatientCount = 63; - var theBundle = readResource(Bundle.class, MY_TEST_DATA); - ourPagingProvider.setMaximumPageSize(100); - var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); - repository.transaction(theBundle); - var result = repository.search(Bundle.class, Patient.class, withCountParam(100)); - // count all resources in result - int counter = 0; for (Object i : result.getEntry()) { counter++; } @@ -108,14 +83,41 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { "Patient search results don't match available resources"); } - @Test - void transactionReadsEncounterResources() { - var expectedEncounterCount = 652; - var theBundle = readResource(Bundle.class, MY_TEST_DATA); + + void canSearchWithPagination(HapiFhirRepository theRepository) { + + var result = theRepository.search(Bundle.class, Patient.class, withCountParam(20)); + assertEquals(20, result.getEntry().size()); + var next = result.getLink().get(1); + assertEquals("next", next.getRelation()); + var nextUrl = next.getUrl(); + var nextResult = theRepository.link(Bundle.class, nextUrl); + assertEquals(20, nextResult.getEntry().size()); + assertEquals(false, + result.getEntry().stream().map(e -> e.getResource().getIdPart()).anyMatch( + i -> nextResult.getEntry().stream().map(e -> e.getResource().getIdPart()) + .toList().contains(i))); + } + + + void transactionReadsPatientResources(HapiFhirRepository theRepository) { + var expectedPatientCount = 65; + ourPagingProvider.setMaximumPageSize(100); + var result = theRepository.search(Bundle.class, Patient.class, withCountParam(100)); + // count all resources in result + int counter = 0; + for (Object i : result.getEntry()) { + 
counter++; + } + // verify all patient resources captured + assertEquals(expectedPatientCount, counter, + "Patient search results don't match available resources"); + } + + void transactionReadsEncounterResources(HapiFhirRepository theRepository) { + var expectedEncounterCount = 654; ourPagingProvider.setMaximumPageSize(1000); - var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); - repository.transaction(theBundle); - var result = repository.search(Bundle.class, Encounter.class, withCountParam(1000)); + var result = theRepository.search(Bundle.class, Encounter.class, withCountParam(1000)); // count all resources in result int counter = 0; for (Object i : result.getEntry()) { @@ -125,10 +127,8 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { assertEquals(expectedEncounterCount, counter, "Encounter search results don't match available resources"); } - - @Test - void repositorySearchForEncounterWithMatchingCode() { - loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); + + void repositorySearchForEncounterWithMatchingCode(HapiFhirRepository theRepository) { //SearchConverter validation test for repository List codeList = new ArrayList<>(); @@ -152,10 +152,8 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { // replicate repository searchParam list Map> searchParams = Map.of("type", codeList, "subject", Collections.singletonList(new ReferenceParam("Patient/numer-EXM130"))); - var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); - // replicate search for valueset codes - var result = repository.search(Bundle.class, Encounter.class, searchParams); + var result = theRepository.search(Bundle.class, Encounter.class, searchParams); // count all resources in result int counter = 0; @@ -166,15 +164,12 @@ public class HapiFhirRepositoryR4Test extends BaseCrR4TestServer { assertEquals(1, counter, "Encounter search results don't match available 
resources"); } - - @Test - void transactionReadsImmunizationResources() { + + void transactionReadsImmunizationResources(HapiFhirRepository theRepository) { var expectedEncounterCount = 638; - var theBundle = readResource(Bundle.class, MY_TEST_DATA); ourPagingProvider.setMaximumPageSize(1000); - var repository = new HapiFhirRepository(myDaoRegistry, setupRequestDetails(), myRestfulServer); - repository.transaction(theBundle); - var result = repository.search(Bundle.class, Immunization.class, withCountParam(1000)); + + var result = theRepository.search(Bundle.class, Immunization.class, withCountParam(1000)); // count all resources in result int counter = 0; for (Object i : result.getEntry()) { diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java index 1b4e9aa7a71..4a6ad5f6b06 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/MeasureOperationProviderTest.java @@ -7,6 +7,7 @@ import org.hl7.fhir.r4.model.MeasureReport; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Resource; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -21,6 +22,27 @@ import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(SpringExtension.class) class MeasureOperationProviderTest extends BaseCrR4TestServer { + @BeforeEach + void setup() { + // load resources + loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); + loadBundle("Exm104FhirR4MeasureBundle.json"); + loadBundle("ClientNonPatientBasedMeasureBundle.json"); + loadBundle("multiversion/EXM124-7.0.000-bundle.json"); + loadBundle("multiversion/EXM124-9.0.000-bundle.json"); + loadBundle("largeValueSetMeasureTest-Bundle.json"); + 
loadBundle("BCSEHEDISMY2022-bundle.json"); + } + @Test + void runMeasureTests(){ + // run tests + assertNotNull(testMeasureEvaluateExm130()); + assertNotNull(testMeasureEvaluateExm104()); + testClientNonPatientBasedMeasureEvaluate(); + testMeasureEvaluateMultiVersion(); + testLargeValuesetMeasure(); + testHedis2022(); + } public MeasureReport runEvaluateMeasure(String periodStart, String periodEnd, String subject, String measureId, String reportType, String practitioner){ @@ -31,43 +53,96 @@ class MeasureOperationProviderTest extends BaseCrR4TestServer { parametersEval.addParameter("reportType", reportType); parametersEval.addParameter("subject", subject); - var report = ourClient.operation().onInstance("Measure/" + measureId) + return ourClient.operation().onInstance("Measure/" + measureId) .named(ProviderConstants.CR_OPERATION_EVALUATE_MEASURE) .withParameters(parametersEval) .returnResourceType(MeasureReport.class) .execute(); - - assertNotNull(report); - - return report; + } + MeasureReport testMeasureEvaluateExm130() { + return runEvaluateMeasure("2019-01-01", "2019-12-31", "Patient/numer-EXM130", "ColorectalCancerScreeningsFHIR", "Individual", null); } - @Test - void testMeasureEvaluate_EXM130() { - loadBundle("ColorectalCancerScreeningsFHIR-bundle.json"); - runEvaluateMeasure("2019-01-01", "2019-12-31", "Patient/numer-EXM130", "ColorectalCancerScreeningsFHIR", "Individual", null); - } - @Test - void testMeasureEvaluate_EXM104() { - loadBundle("Exm104FhirR4MeasureBundle.json"); - runEvaluateMeasure("2019-01-01", "2019-12-31", "Patient/numer-EXM104", "measure-EXM104-8.2.000", "Individual", null); + MeasureReport testMeasureEvaluateExm104() { + return runEvaluateMeasure("2019-01-01", "2019-12-31", "Patient/numer-EXM104", "measure-EXM104-8.2.000", "Individual", null); } + void testHedis2022() { + + runWithPatient("BCSEHEDISMY2022", "Patient/Patient-5", 0, 0, 0, 0, false, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + 
runWithPatient("BCSEHEDISMY2022", "Patient/Patient-7", 1, 1, 0, 0, true, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + runWithPatient("BCSEHEDISMY2022", "Patient/Patient-9", 0, 0, 0, 0, true, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + runWithPatient("BCSEHEDISMY2022", "Patient/Patient-21", 1, 0, 1, 0, true, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + runWithPatient("BCSEHEDISMY2022", "Patient/Patient-23", 1, 1, 0, 0, true, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + runWithPatient("BCSEHEDISMY2022", "Patient/Patient-65", 1, 1, 0, 1, true, + "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); + } + + void testClientNonPatientBasedMeasureEvaluate() { + + var measure = read(new IdType("Measure", "InitialInpatientPopulation")); + assertNotNull(measure); + + var returnMeasureReport = runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/97f27374-8a5c-4aa1-a26f-5a1ab03caa47", "InitialInpatientPopulation", "Individual", null); + + + String populationName = "initial-population"; + int expectedCount = 2; + + Optional population = returnMeasureReport.getGroup().get(0) + .getPopulation().stream().filter(x -> x.hasCode() && x.getCode().hasCoding() + && x.getCode().getCoding().get(0).getCode().equals(populationName)) + .findFirst(); + + assertTrue(population.isPresent(), String.format("Unable to locate a population with id \"%s\"", populationName)); + assertEquals(population.get().getCount(), expectedCount, + String.format("expected count for population \"%s\" did not match", populationName)); + } + void testMeasureEvaluateMultiVersion() { + + assertNotNull(runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/numer-EXM124", "measure-EXM124-7.0.000", "Individual", null)); + assertNotNull(runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/numer-EXM124", "measure-EXM124-9.0.000", "Individual", null)); + + } + + void testLargeValuesetMeasure() throws 
NoSuchElementException { + + var returnMeasureReport = runEvaluateMeasure("2023-01-01", "2024-01-01", null, "CMSTest", "population", null); + + String populationName = "numerator"; + int expectedCount = 1; + + Optional population = returnMeasureReport.getGroup().get(0) + .getPopulation().stream().filter(x -> x.hasCode() && x.getCode().hasCoding() + && x.getCode().getCoding().get(0).getCode().equals(populationName)) + .findFirst(); + assertTrue(population.isPresent(), String.format("population \"%s\" not found in report", populationName)); + assertEquals( + expectedCount, + population.get().getCount(), + String.format("expected count for population \"%s\" did not match", populationName) + ); + } private void runWithPatient(String measureId, String patientId, int initialPopulationCount, int denominatorCount, - int denominatorExclusionCount, int numeratorCount, boolean enrolledDuringParticipationPeriod, - String participationPeriod) { + int denominatorExclusionCount, int numeratorCount, boolean enrolledDuringParticipationPeriod, + String participationPeriod) { var returnMeasureReport = runEvaluateMeasure("2022-01-01", "2022-12-31", patientId, measureId, "Individual", null); for (MeasureReport.MeasureReportGroupPopulationComponent population : returnMeasureReport.getGroupFirstRep() .getPopulation()) - switch (population.getCode().getCodingFirstRep().getCode()) { - case "initial-population" -> assertEquals(initialPopulationCount, population.getCount()); - case "denominator" -> assertEquals(denominatorCount, population.getCount()); - case "denominator-exclusion" -> assertEquals(denominatorExclusionCount, population.getCount()); - case "numerator" -> assertEquals(numeratorCount, population.getCount()); - } + switch (population.getCode().getCodingFirstRep().getCode()) { + case "initial-population" -> assertEquals(initialPopulationCount, population.getCount()); + case "denominator" -> assertEquals(denominatorCount, population.getCount()); + case "denominator-exclusion" -> 
assertEquals(denominatorExclusionCount, population.getCount()); + case "numerator" -> assertEquals(numeratorCount, population.getCount()); + } Observation enrolledDuringParticipationPeriodObs = null; Observation participationPeriodObs = null; @@ -88,76 +163,4 @@ class MeasureOperationProviderTest extends BaseCrR4TestServer { assertNotNull(participationPeriodObs); assertEquals(participationPeriod, participationPeriodObs.getValueCodeableConcept().getCodingFirstRep().getCode()); } - - @Test - void testBCSEHEDISMY2022() { - loadBundle("BCSEHEDISMY2022-bundle.json"); - - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-5", 0, 0, 0, 0, false, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-7", 1, 1, 0, 0, true, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-9", 0, 0, 0, 0, true, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-21", 1, 0, 1, 0, true, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-23", 1, 1, 0, 0, true, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - runWithPatient("BCSEHEDISMY2022", "Patient/Patient-65", 1, 1, 0, 1, true, - "Interval[2020-10-01T00:00:00.000, 2022-12-31T23:59:59.999]"); - } - - @Test - void testClientNonPatientBasedMeasureEvaluate() { - this.loadBundle("ClientNonPatientBasedMeasureBundle.json"); - - var measure = read(new IdType("Measure", "InitialInpatientPopulation")); - assertNotNull(measure); - - var returnMeasureReport = runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/97f27374-8a5c-4aa1-a26f-5a1ab03caa47", "InitialInpatientPopulation", "Individual", null); - - - String populationName = "initial-population"; - int expectedCount = 2; - - Optional population = returnMeasureReport.getGroup().get(0) - .getPopulation().stream().filter(x 
-> x.hasCode() && x.getCode().hasCoding() - && x.getCode().getCoding().get(0).getCode().equals(populationName)) - .findFirst(); - - assertTrue(population.isPresent(), String.format("Unable to locate a population with id \"%s\"", populationName)); - assertEquals(population.get().getCount(), expectedCount, - String.format("expected count for population \"%s\" did not match", populationName)); - } - - @Test - void testMeasureEvaluateMultiVersion() { - this.loadBundle("multiversion/EXM124-7.0.000-bundle.json"); - this.loadBundle("multiversion/EXM124-9.0.000-bundle.json"); - - runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/numer-EXM124", "measure-EXM124-7.0.000", "Individual", null); - runEvaluateMeasure("2019-01-01", "2020-01-01", "Patient/numer-EXM124", "measure-EXM124-9.0.000", "Individual", null); - - } - - @Test - void testLargeValuesetMeasure() throws NoSuchElementException { - this.loadBundle("largeValueSetMeasureTest-Bundle.json"); - - var returnMeasureReport = runEvaluateMeasure("2023-01-01", "2024-01-01", null, "CMSTest", "population", null); - - String populationName = "numerator"; - int expectedCount = 1; - - Optional population = returnMeasureReport.getGroup().get(0) - .getPopulation().stream().filter(x -> x.hasCode() && x.getCode().hasCoding() - && x.getCode().getCoding().get(0).getCode().equals(populationName)) - .findFirst(); - assertTrue(population.isPresent(), String.format("population \"%s\" not found in report", populationName)); - assertEquals( - expectedCount, - population.get().getCount(), - String.format("expected count for population \"%s\" did not match", populationName) - ); - } } diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java index 09071d6ca64..e80eab49bc4 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java @@ 
-4,7 +4,6 @@ import ca.uhn.fhir.cr.TestCqlProperties; import ca.uhn.fhir.cr.TestCrConfig; import ca.uhn.fhir.cr.common.CqlThreadFactory; import ca.uhn.fhir.cr.config.r4.CrR4Config; -import ca.uhn.fhir.rest.api.SearchStyleEnum; import org.cqframework.cql.cql2elm.CqlCompilerOptions; import org.cqframework.cql.cql2elm.model.CompiledLibrary; import org.cqframework.cql.cql2elm.model.Model; @@ -13,7 +12,6 @@ import org.hl7.elm.r1.VersionedIdentifier; import org.opencds.cqf.cql.engine.execution.CqlEngine; import org.opencds.cqf.cql.engine.runtime.Code; import org.opencds.cqf.fhir.cql.EvaluationSettings; -import org.opencds.cqf.fhir.cql.engine.retrieve.BaseRetrieveProvider; import org.opencds.cqf.fhir.cql.engine.retrieve.RetrieveSettings; import org.opencds.cqf.fhir.cql.engine.terminology.TerminologySettings; import org.opencds.cqf.fhir.cr.measure.CareGapsProperties; diff --git a/hapi-fhir-storage-cr/src/test/resources/ColorectalCancerScreeningsFHIR-bundle.json b/hapi-fhir-storage-cr/src/test/resources/ColorectalCancerScreeningsFHIR-bundle.json index b48340e557e..96025856625 100644 --- a/hapi-fhir-storage-cr/src/test/resources/ColorectalCancerScreeningsFHIR-bundle.json +++ b/hapi-fhir-storage-cr/src/test/resources/ColorectalCancerScreeningsFHIR-bundle.json @@ -645,7 +645,7 @@ { "type": "depends-on", "display": "Code system SNOMEDCT:2017-09", - "resource": "http://snomed.info/sct|http://snomed.info/sct/version/201709" + "resource": "http://snomed.info/sct/version/201709" }, { "type": "depends-on", @@ -91120,7 +91120,7 @@ { "type": "depends-on", "display": "Code system SNOMEDCT:2017-09", - "resource": "http://snomed.info/sct|http://snomed.info/sct/version/201709" + "resource": "http://snomed.info" }, { "type": "depends-on", @@ -149191,13 +149191,46 @@ } ], "gender": "male", - "birthDate": "1965-01-01" + "birthDate": "1965-01-01", + "generalPractitioner":[ + {"reference": "Practitioner/practitioner-EXM130"} + ] }, "request": { "method": "PUT", "url": "Patient/numer-EXM130" } 
}, + {"resource":{ + "resourceType": "Group", + "id": "group-EXM130", + "type": "person", + "actual": true, + "member": [ + { "entity": { "reference": "Patient/denom-EXM130" } }, + { "entity": { "reference": "Patient/numer-EXM130" } }, + { "entity": { "reference": "Patient/neg-ip-EXM130" } } + ] + }, + "request": { + "method": "PUT", + "url": "Group/group-EXM130" + } + }, + {"resource":{ + "resourceType": "Practitioner", + "id": "practitioner-EXM130", + "name": [ { + "family": "Dawg", + "given": [ "Jay" ], + "prefix": [ "Dr" ] + } ] + }, + "request": { + "method": "PUT", + "url": "Practitioner/practitioner-EXM130" + } + }, { "resource": { "resourceType": "ValueSet", diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index 6762591a2b9..df18f8b9d40 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index e62f8c7e2bf..d93b55c5cb6 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 64d3e7c6c2e..5aea02bd8f2 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index c93c2bdb158..e62996475ba 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml 
b/hapi-fhir-structures-dstu2/pom.xml index 64485e7520e..7297607fc8a 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index eaf3612ccd0..821380e7508 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index cd303bde32e..88120888121 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index f6ebdd3c94b..d0b3a4a6561 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index 59edff2a9d6..26d6ce29e11 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 813116ef724..0c68498f67c 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 
181461ece57..dedb670bef1 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 5caacf907a9..8f5f6350e30 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 158075f8d8d..5d10e9b7631 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index a7114fe0883..f72ebdf1655 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 36197381a90..7ca2befe340 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index d283ed7ab1d..73a32446518 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index d912140ab61..4c457ee1d84 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index b030ccb250b..6bebb0710d6 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 375d4769355..18a7b98a41e 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 6845e456f5c..cf35dee8dcf 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 4ade7bb82fd..9044c7b169e 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 7f8822aceb7..c6ef616a573 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. 
@@ -1006,7 +1006,7 @@ 1.0.8 - 3.3.1 + 3.4.0 5.4.1 diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index a7cd1eba2a8..a1d051bf841 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index aa7c4d23be7..44c164994db 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 25a2406ef92..4744f63418c 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.9-SNAPSHOT + 7.1.10-SNAPSHOT ../../pom.xml From 29cddaecc71b5e60ed7b3196f8bb7fd905d570cf Mon Sep 17 00:00:00 2001 From: longma1 <32119004+longma1@users.noreply.github.com> Date: Thu, 18 Apr 2024 16:36:24 -0600 Subject: [PATCH 15/26] March 24 master mb (#5858) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Remove superfluous migration * Remove superfluous migration * Update upgrade.md * fixing test (#4835) Co-authored-by: leif stawnyczy * email subscription, throw NullPointerException (#4790) * fix bug * Favoring constructor initialization to autowiring. * enhancing test. * Making class LoggingEmailSender available outside of the hapi-fhir-japserver-uhnfhirtest module. * Passing all tests. * adding changelog. * Bumping version to 6.5.20-SNAPSHOT * addressing code review comment. 
--------- Co-authored-by: peartree * Add docs for CR operations (#4855) * Add docs for CR operations * Correct changelog and javadoc for $package * Add documentation for $apply parameters * Add additional documentation for $package * Cleanup * Cleanup * Cleanup * Address review comments * Add documentation for $care-gaps operation. (#4862) * Add documentation for -gaps. * addressing the comments. --------- Co-authored-by: Chalma Maadaadi * 4853 validation does not error when display is not the same as the display defined in the codesystem 2 (#4854) * added failing test * implemented the solution * changed test name * added change log * Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4853-validation-does-not-error-when-display-is-not-the-same-as-the-display-defined-in-the-codesystem-2.yaml Co-authored-by: James Agnew --------- Co-authored-by: Steven Li Co-authored-by: James Agnew * fixing patient everything operator (#4845) * fixing patient everything operator * review fix --------- Co-authored-by: leif stawnyczy * fix link * Move image file * Bundle resources containing over 100 references to the same Organization will fail with HAPI-2207 (#4871) * Add failing unit test. * Fix JpaId Long equality comparison to use ! equals() instead of !=, which fails for different instances of the same Long value. * Add changelog. 
* added warn message and test (#4848) * added warn message and test * code review fixes --------- Co-authored-by: Long Ma * Issue 4804 full table scan on mpi link during mdm clear (#4805) * version bump for next release (#4793) * version bump * Bump to correctnumber * Version Enum and folder * Remove interim from list * wip * Fix operation on nested type-choices in FhirPatch implementation (#4783) * Fix operation on nested type-choices in FhirPatch implementation * Add credit for #4783 --------- Co-authored-by: James Agnew * #4468 fix previous link offset no cache pagination (#4489) * #4468 Add test reproducing the issue * #4468 Fix previous link for no cache offset pagination * #4468 Use unchecked URI parsing * Credit for #4489 --------- Co-authored-by: James Agnew * Changelog and data generating test * Add MdmLink index * Avoid double link deletion * Use ThreadLocal safely --------- Co-authored-by: Tadgh Co-authored-by: Zach Smith <85943952+zachdoctolib@users.noreply.github.com> Co-authored-by: James Agnew Co-authored-by: Aleksej Parovysnik <100864000+alparodev@users.noreply.github.com> Co-authored-by: juan.marchionatto * Fix erroneous batch 2 $export 75% complete count when the job is COMPLETE (#4859) * Add failing unit test. * Add conditional logic to the InstanceProgress progress percentage to disregard the incomplete count if this is called from the reduction step. This is to get around a race condition in which a work chunk is QUEUED and not yet complete when the reduction step calculates the progress. * Add final. * Add changelog. 
* disable wars (#4877) Co-authored-by: Ken Stevens * 4868 fix paging hapi (#4870) * fixing some offset and adding a test * fixing the offset paging * Removing duplicate --------- Co-authored-by: leif stawnyczy Co-authored-by: Aleksej Parovysnik <100864000+alparodev@users.noreply.github.com> * 4875-binary-access-write-doest-trigger-STORAGE-BINARY-ASSIGN-BLOB-ID-PREFIX-pointcut (#4876) * Add failing test * Add failing test * Fix and changelog * Pass content type parameter * Back to auto wiring the context * Invoke interceptor only when getting blobId, not also when storing it * Avoid breaking implementers * Address review comment * Add new exception Msg code * Fix broken test --------- Co-authored-by: juan.marchionatto * Fix batch job (bulk export) processed record count (#4879) * Remove racy stats recalc. * Throw 404 when requesting $export of non-existent Group or Patient (#4890) * Remove default implementation intended only for interim backwards compatibility (#4894) Co-authored-by: juan.marchionatto * Rule apply patient export (#4893) * Test, fix, and changelog * Better partition resolution * Add checks based on rule applier * Fix ordering failure due to hash set * Allow empty auth interceptor * Fix up operation type on invocation * Add more tests, make hack implementation for patient instance level operation * Tighten test name * Changelog * Default method * remove dead method * Remove dead autowire --------- Co-authored-by: Michael Buckley * cve pom changes (#4898) Co-authored-by: Long Ma * backport subscription topic bean cleanup (#4904) * 4891 bulk export do not recurse unasked for resources (#4895) * updating tests * fixing bulk export to not fetch resources not requested * cleanup * cleanup * more warning suppressing * adding error code * blah * fix test * review fixes --------- Co-authored-by: leif stawnyczy * lowers log level to remove bootup noise (#4908) * CVE rel 6 6 (#4907) * cve pom changes * bump javax.el to jakarta.el --------- Co-authored-by: Long 
Ma * Issue 4905 post binary failure invoking interceptor for pointcuts storage preshow resources (#4906) * Initial failing test * Avoid applying binary blob id prefix multiple times * Remove recently introduced method not needed anymore --------- Co-authored-by: juan.marchionatto * Enhance LogbackCaptureTestExtension (#4869) * repro bug with test, fix bug * ken informed me he resolved this bug on master, so i'm switching to use his solution * disable wars * review feedback * review feedback * review feedback again --------- Co-authored-by: josie Co-authored-by: Ken Stevens * Resolve 4863 from release branch searchparametercanonicalizer does not account for search parameters for custom resources types when converting dstu23 into runtimesearchparam (#4887) * Modified canonicalizeSearchParameterDstu2 and 3, now correctly detect search parameters for custom resources * Canonicalizers now correctly handle search parameters for custom resources * created changelog * Modification based on comments: - remove Resource from target field when there are custom resource types - fixed changelog typo - removed unnecessary variable providesMembershipInCompartments * Added tests for the SearchParameterCanonicalizer to test if base and target of RuntimeSearchParam is set as expected for DSTU2, DSTU3, R4, R4B, and R5 resources * Fixed typo and removed commented code * re-ordered init methods * Update changelog Co-authored-by: Tadgh * modifications following first code review. --------- Co-authored-by: Tadgh Co-authored-by: peartree * License * Remove _lastUpdated filtering of _revincludes. (#4899) Remove _lastUpdated filtering of _revincludes. 
* 4910-dm-migration-error-for-oracle-19c (#4916) * Remove all_constraints references which break in oracle 19c * Add changelog --------- Co-authored-by: juan.marchionatto * 4873 empty fhirid causes missing resource (#4874) * add check for empty fhirid string and add test * add test for populateid * changelog * version bump * version bump * reverse version bump * Back to 6.5.21-SNAPSHOT. --------- Co-authored-by: justindar Co-authored-by: Luke deGruchy * Fix include canonical url performance (#4919) Use hash_identity for canonical join * License * Version bump * Temp disable * Make this dependency provided * Updating version to: 6.6.1 post release. * Bump snakeyaml * Bump jackson to be compatible with new snakeyaml * Bump deps (#4926) * resolve CVEs, add new notes * Updating version to: 6.6.2 post release. * backport pr 5013 tp rel_6_4 * bump to 6.4.6 * added new version version.yaml * Updating version to: 6.4.7 post release. * #5023 Unexpected usage of awssdk StringUtils in hapi 6.6 jpa migration (#5024) task Replace usage of awssdk * added docs * mssql test fixes * Updating version to: 6.6.3 post release. * version bump * Bump to core release 6.0.22 (#5028) * Bump to core release 6.0.16 * Bump to core version 6.0.20 * Fix errors thrown as a result of VersionSpecificWorkerContextWrapper * Bump to core 6.0.22 * Resolve 5126 hfj res ver prov might cause migration error on db that automatically indexes the primary key (#5127) * dropped old index FK_RESVERPROV_RES_PID on RES_PID column before adding IDX_RESVERPROV_RES_PID * added changelog * changed to valid version number * changed to valid version number, need to be ordered by version number... 
* 5123 - Use DEFAULT partition for server-based requests if none specified (#5124) 5123 - Use DEFAULT partition for server-based requests if none specified * consent remove all suppresses next link in bundle (#5119) * added FIXME with source of issue * added FIXME with root cause * added FIXME with root cause * Providing solution to the issue and removing fixmes. * Providing changelog * auto-formatting. * Adding new test. * Adding a new test for standard paging * let's try this and see if it works...? * fix tests * cleanup to trigger a new run * fixing tests --------- Co-authored-by: Ken Stevens Co-authored-by: peartree * 5117 MDM Score for No Match Fields Should Not Be Included in Total Score (#5118) * fix, test, changelog * fix, test, changelog --------- Co-authored-by: justindar * _source search parameter needs to support modifiers (#5095) _source search parameter needs to support modifiers - added support form :contains, :missing, :above modifiers * Fix HFQL docs (#5151) * Expunge operation on codesystem may throw 500 internal error with precondition fail message. (#5156) * Initial failing test. * Solution with changelog. * fixing format. * Addressing comment from code review. * fixing failing test. --------- Co-authored-by: peartree * documentation update (#5154) Co-authored-by: leif stawnyczy * Fix hsql jdbc driver deps (#5168) Avoid non-included classes in jdbc driver dependencies. * $delete-expunge over 10k resources will now delete all resources (#5144) * First commit with very rough fix and unit test. * Refinements to ResourceIdListStep and Batch2DaoSvcImpl. Make LoadIdsStepTest pass. Enhance Batch2DaoSvcImplTest. * Spotless * Fix checkstyle errors. * Fix test failures. * Minor refactoring. New unit test. Finalize changelist. * Spotless fix. * Delete now useless code from unit test. * Delete more useless code. * Test pre-commit hook * More spotless fixes. * Address most code review feedback. 
* Remove use of pageSize parameter and see if this breaks the pipeline. * Remove use of pageSize parameter and see if this breaks the pipeline. * Fix the noUrl case by passing an unlimited Pegeable instead. Effectively stop using page size for most databases. * Deprecate the old method and have it call the new one by default. * updating documentation (#5170) Co-authored-by: leif stawnyczy * _source search parameter modifiers for Subscription matching (#5159) * _source search parameter modifiers for Subscription matching - test, implementation and changelog * Removal of meta tags during updates do not trigger subscription (#5181) * Initial failing test. * adding solution; fixing documentation; * spotless apply * adding changelog * modifying current test --------- Co-authored-by: peartree * Issue 5173 get gateway everything doesnt return all patients (#5174) * Failing test * Also set offset and count in base DAO override * Changelog * Fix for specific case where count has been set in parameters * spotless * Improve checks --------- Co-authored-by: juan.marchionatto * Do not 500 and continue IG ingestion when different IGs try to save different ValueSets with colliding FHIR IDs (#5175) * First commit with failing unit test and small tweaks. * Swallow resource version exceptions from colliding ValueSet OIDs and log a descriptive error instead. Add more detailed unit testing. * Tweaks to logic and update the changelog. Reverse all changes to TermReadSvcImpl. * Revert PackageResourceParsingSvc to release branch baseline. * Accept code reviewer suggestion to change changelog description. Co-authored-by: michaelabuckley --------- Co-authored-by: michaelabuckley * Fix link * Remove target slf4j version * dont use new API for a bit (#5191) * Return DropIdGeneratorTask from the Builder to permit clients to mutate the (#5193) DropIdGeneratorTask. 
* Dqm performance bug update and provider loader fix (#5180) * update tests, move properties, update operation loader * update wip * remove test * fixing tests, adding config * update config and provider loader * fix bundles * fix cache settings on tests * version bump and change log * version bump * fix formatting * CVE-2022-45868 * wip cve change * cve h2 add back in --------- Co-authored-by: justin.mckelvy * bulkExportReuse with POST and GET (#5161) * string manipulation * Code to ensure bulkExportReuse works with POST and GET requests * Added formatting changes * Fixed tests that were not working * Formatting * Code clean up * fixing test failures * fixing test failures * Removed arrOfParams to now utilize ObjectMapper * Removing stack trace and adding an exception * Fixed test issue * formatting * formatting * Resolving code review comments * Reduce size of subscription max results (#5194) * Reduce MAX_SUBSCRIPTION_RESULTS to 10000 * Add changelog * 5037 goldenresource remains when target resource deleted (#5038) * draft test and fix * remove unused fields * remove unused fields * remove unused fields * draft test + solution for possible match case * combine sql statement + more error checking * add test case for possible duplicate * add config for autodeleting grs * refactoring, adding support for mongo, docs * refactoring + fixing mongo queries * add changelogs * fix both way link removal * clean up test comments * rename method * remove unnecessary bean * merge master/resolve conflicts * mvn spotless * address comment * changes to avoid version bumping * spotless * change error code --------- Co-authored-by: justindar * dont use new API for a bit (#5190) * licenses * Fixing terminology upload with large file. (#5204) * create initial test * adding fix; adding changelog; * fixing format. 
--------- Co-authored-by: peartree * up version to 6.8.0 from snapshot * fix up some duplicate deps for skipping * Force deployment of other projects * make dummy java class to pass sonatype checks * make dummy java class to pass sonatype checks * Cheeky test removal * Another test removal * Updating version to: 6.8.1 post release. * adding back in tests for release pipelines * Fix bad version insert * Bump to core library version 6.0.22.2 (#5261) * Bump to core library version 6.0.22.2 * Bump HAPI to 6.8.1-SNAPSHOT, core library version to 6.0.22.2 * up version to 6.8.1 * Prevent remote deployment * Updating version to: 6.8.2 post release. * Re-add deploy plugin skip * re-add version * Bump core library version to 6.0.22.2 * Bump HAPI to 6.8.2-SNAPSHOT * Fix up version enum * Version bump * Updating version to: 6.8.3 post release. * Make display name validation configurable (#5321) * Test fixes * Build cleanup * Initial test passing * Test fixes * Tests all seem to be working * Make display validation level configurable * Should be all working * Add changelog * Add to changelog --------- Co-authored-by: Tadgh * wip * wip * Fix conflicts * add backport info * Fix versioning * test fixes * disabled 1 test * backported 5271 to fix validation message tests * wip * Changelog * Fix tests * Fix backport * Add upgrade.md * Fix test * Fix key * Updating version to: 6.8.4 post release. * 5353 iterate on revincludes and includes does not return correct resources when used with non iterate revincludes (#5354) * added failing test * implemented solution * added doc generation * added changelog * fixed the order of includes and revincludes, based on what was implemented before * fixed formatting * fixed wording --------- Co-authored-by: Steven Li * added changelog * Updating version to: 6.8.5 post release. 
* merge back 5377 * changelog * Allow cached search with consent active when safe (#5387) Allow the search cache when using consent if safe * Change package installation behaviour such that it updates the existing SearchParameter base with remaining resources (#5376) * Change package installation behavior such that it updates the existing SearchParameter base with remaining resources * Change package installation behavior such that it updates the existing SearchParameter base with remaining resources * Use resourceType in the package installer output to fix tests. Minor change with resourceType condition. Update changelog description to make it more readable. * Use resourceType in the package installer output to fix tests. Minor change with resourceType condition. Update changelog description to make it more readable. * Updating version to: 6.8.6 post release. * Transaction with conditional update fails if SearchNarrowingInterceptor is registered and Enabled Partitioning (#5389) * Transaction with conditional update fails if SearchNarrowingInterceptor is registered and Enabled Partitioning - Implementation * Reverse Chaining searches returns an error when invoked with parameter _lastUpdated. (#5177) * version bump * Bump to core release 6.0.22 (#5028) * Bump to core release 6.0.16 * Bump to core version 6.0.20 * Fix errors thrown as a result of VersionSpecificWorkerContextWrapper * Bump to core 6.0.22 * Resolve 5126 hfj res ver prov might cause migration error on db that automatically indexes the primary key (#5127) * dropped old index FK_RESVERPROV_RES_PID on RES_PID column before adding IDX_RESVERPROV_RES_PID * added changelog * changed to valid version number * changed to valid version number, need to be ordered by version number... 
* 5123 - Use DEFAULT partition for server-based requests if none specified (#5124) 5123 - Use DEFAULT partition for server-based requests if none specified * consent remove all suppresses next link in bundle (#5119) * added FIXME with source of issue * added FIXME with root cause * added FIXME with root cause * Providing solution to the issue and removing fixmes. * Providing changelog * auto-formatting. * Adding new test. * Adding a new test for standard paging * let's try this and see if it works...? * fix tests * cleanup to trigger a new run * fixing tests --------- Co-authored-by: Ken Stevens Co-authored-by: peartree * 5117 MDM Score for No Match Fields Should Not Be Included in Total Score (#5118) * fix, test, changelog * fix, test, changelog --------- Co-authored-by: justindar * _source search parameter needs to support modifiers (#5095) _source search parameter needs to support modifiers - added support form :contains, :missing, :above modifiers * Fix HFQL docs (#5151) * Expunge operation on codesystem may throw 500 internal error with precondition fail message. (#5156) * Initial failing test. * Solution with changelog. * fixing format. * Addressing comment from code review. * fixing failing test. --------- Co-authored-by: peartree * documentation update (#5154) Co-authored-by: leif stawnyczy * Fix hsql jdbc driver deps (#5168) Avoid non-included classes in jdbc driver dependencies. * $delete-expunge over 10k resources will now delete all resources (#5144) * First commit with very rough fix and unit test. * Refinements to ResourceIdListStep and Batch2DaoSvcImpl. Make LoadIdsStepTest pass. Enhance Batch2DaoSvcImplTest. * Spotless * Fix checkstyle errors. * Fix test failures. * Minor refactoring. New unit test. Finalize changelist. * Spotless fix. * Delete now useless code from unit test. * Delete more useless code. * Test pre-commit hook * More spotless fixes. * Address most code review feedback. 
* Remove use of pageSize parameter and see if this breaks the pipeline. * Remove use of pageSize parameter and see if this breaks the pipeline. * Fix the noUrl case by passing an unlimited Pegeable instead. Effectively stop using page size for most databases. * Deprecate the old method and have it call the new one by default. * updating documentation (#5170) Co-authored-by: leif stawnyczy * _source search parameter modifiers for Subscription matching (#5159) * _source search parameter modifiers for Subscription matching - test, implementation and changelog * first fix * tests and preliminary fixes * wip, commit before switching to release branch. * adding capability to handle _lastUpdated in reverse search (_has) * adding changelog * applying spotless. * addressing code review comments. --------- Co-authored-by: tadgh Co-authored-by: dotasek Co-authored-by: Steve Corbett <137920358+steve-corbett-smilecdr@users.noreply.github.com> Co-authored-by: Ken Stevens Co-authored-by: Ken Stevens Co-authored-by: peartree Co-authored-by: jdar8 <69840459+jdar8@users.noreply.github.com> Co-authored-by: justindar Co-authored-by: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> Co-authored-by: Nathan Doef Co-authored-by: Etienne Poirier <33007955+epeartree@users.noreply.github.com> Co-authored-by: TipzCM Co-authored-by: leif stawnyczy Co-authored-by: michaelabuckley Co-authored-by: Luke deGruchy * Br 20231019 add cr settings for cds hooks (#5394) * Add settings used in CR CDS Services. Remove config dependency on Spring Boot. * Add changelog * Use String.format rather than concat strings * spotless apply * Add javadoc * Upgrade notes for the forced-id change (#5400) Add upgrade notes for forced-id * Clean stale search results more aggressively. (#5396) Use bulk DMA statements when cleaning the search cache. The cleaner job now works as long as possible until a deadline based on the scheduling frequency. 
* bump version of clinical reasoning (#5406) * Transaction fails if SearchNarrowingInterceptor is registered and Partitioning Enabled - fix cross-tenant requests failure (#5408) * Transaction with conditional update fails if SearchNarrowingInterceptor is registered and Enabled Partitioning - fix and tests added * removed unused alias from SQL query of mdm-clear (#5416) * Issue 5418 support Boolean class return type in BaseInterceptorService (#5421) * Enable child classes to use Boolean class return type * spotless --------- Co-authored-by: juan.marchionatto * If AutoInflateBinaries is enabled, binaries are created on the disk only for the first resource entry of the bundle (#5420) * If AutoInflateBinaries is enabled, binaries created on disk by bundled requests are created only for the first resource entry - fix * Revert "Issue 5418 support Boolean class return type in BaseInterceptorService (#5421)" (#5423) This reverts commit 4e295a59fb143a2ba54bc44305474096e2acd578. Co-authored-by: Nathan Doef * Use new FHIR_ID column for sorting (#5405) * Sort `_id` using new FHIR_ID column. * Fix old tests that put client-assigned ids first. * Better indexing for sort * Bump core to 6.1.2.2 (#5425) * Bump core to 6.1.2.1 Patch release that uses https for primary org.hl7.fhir.core package server * Bump core to 6.1.2.2 * Make sure to return always a value for Boolean class return type. (#5424) Implement change in a non-disruptive way for overriders Co-authored-by: juan.marchionatto * Add non-standard __pid SP for breaking ties cheaply during sorts. (#5428) Add a non-standard __pid SP. * Review changes for new _pid SP. (#5430) Change name to _pid to match our standard and add warning. * Fix VersionCanonicalizer conversion from R5 into DSTU2 for CapabilityStatement, Parameters and StructuredDefinition (#5432) * Fix VersionCanonicalizer conversion from R5 into DSTU2 for CapabilityStatement, Parameters and StructuredDefinition. 
* Fix spotless issue * CVEs for 6.10.0 (#5433) * Bump jetty * Bump okio-jvm * 8.2.0 mysql connector * Jena and elastic bumps * Fix test * 5412 post bundle on partition incorrect response.link shown (#5413) * Initial fix and unit test provided * spottless check * Made relevant changes to make solution version agnostic * relevant logic changes made * spotless changes made * New logic added to fix failing test cases * formatting * New logic to make the function more robust * spotless checks * Left a trailing slash in the tests * Made relevant test changes and changed logic * spotless changes * Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_0/5412-during-partition-fullUrl-not-shown-in-response.yaml changing changelog Co-authored-by: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> * Formatting requirements --------- Co-authored-by: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> * Resolve We don't have guaranteed subscription delivery if a resource is too large (#5414) * first fix * - added the ability to handle null payload to SubscriptionDeliveringMessageSubscriber and SubscriptionDeliveringEmailSubscriber - refactored code to reduce repeated code - cleaned unnecessary comments and reformatted files * Changed myResourceModifiedMessagePersistenceSvc to be autowired * removed unused import * added error handling when inflating the message to email and message subscriber * reformatted code * Fixing subscription tests with mocked IResourceModifiedMessagePersistenceSvc * Changes by gary * Reformatted file * fixed failed tests * implemented test for message and email delivery subscriber. Fixed logical error. Reformatted File. 
* - implemented IT - fixed logical error - added changelog * fix for cdr tests, NOTE: this makes the assumption that we will always succeed for inflating the database in the tests that uses SynchronousSubscriptionMatcherInterceptor * fix for cdr tests, NOTE: this makes the assumption that we will always succeed for inflating the database in the tests that uses SynchronousSubscriptionMatcherInterceptor * resolve code review comments * reformatted files * fixed tests * Fix for failing IT test in jpaserver-starter (#5435) Co-authored-by: dotasek * wip * Bump jackson databind * Pin Version * Ignored duplicate classes * Updating version to: 6.10.1 post release. * Fix pom * Skip remorte nexus * make release faster * Updating version to: 6.10.1 post release. * Oracle create index migration recovery (#5511) * CLI tool command migrate-database executing in dry-run mode insert entries into table FLY_HFJ_MIGRATION (#5487) * initial test * Solution with changelog. * making spotless hapi * addressing comments from code reviews * making the test better. * addressing code review comment and adding test. --------- Co-authored-by: peartree * added upgrade docs and backport changelogs * Oracle create index migration recovery (#5511) * CLI tool command migrate-database executing in dry-run mode insert entries into table FLY_HFJ_MIGRATION (#5487) * initial test * Solution with changelog. * making spotless hapi * addressing comments from code reviews * making the test better. * addressing code review comment and adding test. --------- Co-authored-by: peartree * added changelog, fix 6.10.0's version.yaml * Fix bad resource id migration (#5548) * Fix bad migration of fhir id. Fix the original migration ForceIdMigrationCopyTask. Also add another migration ForceIdMigrationFixTask to trim the fhir id to correct the data. * Bump to 6.10.1-SNAPSHOT * Merge the fhir_id copy migration with the fhir_id fix to avoid traversing hfj_resource twice. 
(#5552) Turn off the original migration ForceIdMigrationCopyTask. Fix it anyway so nobody copies bad code. Also add another migration ForceIdMigrationFixTask that fixes the bad data, as well as fills in the fhir_id column for new migrations. * move to non snapshot version * Updating version to: 6.8.7 post release. * Fix fixed migration (#5571) * Fix bad migration, prep for 6.10.2 * spotless * Version bump * Updating version to: 6.10.3 post release. * backport mdm duplicate identifier fix * added bunch of license stuff * add changelog for 6.10.3 * Updating version to: 6.10.4 post release. * pom bump to 7.0.0 (#5615) * pom bump to 7.0.0 * add version enum * fixed feb release name --------- Co-authored-by: Long Ma * Check index existence on raw SQL creation (#5624) * Add conditional check on index existence before we try again * Add conditional check on index existence before we try again * Changelog * remit * remit * debug statements * 5621 deadlock on caffeine cache when creating a resource with conditional create (#5622) * Modifying the CacheProvider to avoid doing db I/O within the cache miss value supplier callback. * Setting the initial capacity of instantiated caches to the cache max size to avoid resizing during operations. * adding changelog and spotless. * Fixing typo. * Addressing comments from code review. 
--------- Co-authored-by: peartree * Searching with more than one chained Bundle SearchParameter returns incorrect results (#5614) * Failing test * fix * changelog * Avoiding Exception being thrown on @EventListener invocation (#5628) * replaced EventListener annotation with @PreDestroy * adding changelog --------- Co-authored-by: peartree * simple fix (#5630) Co-authored-by: Long Ma * Incorrect version of auto versioned reference for conditional update with urn id placeholder (#5625) * Incorrect version from versioned_references.auto_at_paths for conditional update - implementation * Oracle: Ensure migrated database still takes large resource text updates (#5629) * First pass at fix to Oracle HFJ_RES_VER.RES_TEXT_VC migration. * First stab at agreed upon solution. * Fix error with 4001 by removing unnecessary annotation. * Spotless and TODO. * Remove annotation for good and set length to LONG32. * Fix copyright year. * Finalize changelog. * Remove migration changes. Fix unit test. * Fix compile error. * Log output. * Refactor resource history code into new ResourceHistoryCalculator. * Spotless. * Convert record to POJO. * Restore pre-17 switch statement. * Finalize new resource history calculator code and tests. * Spotless. * Remove logging. * Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5633-oracle-hfj-res-ver-clob-migration.yaml Apply code reviewer suggestion Co-authored-by: Michael Buckley * Code review feedback. --------- Co-authored-by: Michael Buckley * Fix expansion for `ValueSet` with no concepts based on CodeSystem `urn:ietf:bcp:13` (#5638) * When fetching the mimetype code system, return empty CodeSystem with NOTSUPPORTED content. Update expansion logic to handle this case. Add some test cases. 
* Minor change to fix test * Rename changelog file * Remove TODOs as they're replaced by reported issue * Revert accidental change added with TODO removal * $expunge operation ignoring ExpungeThreadCount setting in certain cases (#5637) * $expunge operation ignoring ExpungeThreadCount setting in certain cases - implementation * Fix Measure group id null pointer exception (#5620) * Update to new version of clinical reasoning, includes a fix for null pointers on Measure group id * add changelog --------- Co-authored-by: Justin McKelvy <60718638+Capt-Mac@users.noreply.github.com> * Rule evaluation: Allow Bundles with PATCH Parameters (#5641) * Remove parameters clause. * Finalize changelog. Add tests. Finalize implementation. * Undo changes to this test. * Revert all changes to FhirQueryRuleImplTest. Add new RuleImplOpTest. Ensure that proper nested Bundle is created for test and other fixes. * Tweak test. * Use real rule applier in test and remove all mocks. * Prevent batch2 job execution to stop for empty chunk when last job st… (#5635) * Prevent batch2 job execution to stop for empty chunk when last job step is a reduction. Add output to bulk export result even when empty. * Fix test * Unimportant change to force fresh build * Implement review suggestions --------- Co-authored-by: juan.marchionatto * Index review fixes (#5649) * Don't hold locks while adding indices during upgrade. * concurrent indexing is non-transactional in PG. * 5621 deadlock on caffeine cache when creating a resource with conditional create (#5622) * Modifying the CacheProvider to avoid doing db I/O within the cache miss value supplier callback. * Setting the initial capacity of instantiated caches to the cache max size to avoid resizing during operations. * adding changelog and spotless. * Fixing typo. * Addressing comments from code review. --------- Co-authored-by: peartree * added 6.10.4 changelog and backport changelog * Fix conditional creates without leading '?' 
(#5646) * First commit with failing test. * More tests and logs. * More logs * Try new solution for BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(). * Simplify solution. Add more tests. * Changelog. * javadoc * Searching for Bundles with read all Bundles permission returns 403 (#5644) * failing test * another failing test case * fix * changelog * fix bug * spotless * cr * Fix NullPointerException when performing a system bulk export in the presence of PatientIdPartitionInterceptor. (#5660) * Updating version to: 6.10.5 post release. * Bump json-path * Pin parrson * Bump elastic * Bump spring version * Exclude JDBC * Serializing changes for sensitive data (#5655) * Add new senstiive data serializer * Add new senstiive data serializer * Add new senstiive data serializer * Remove dead comments * Change up the test * review comments * wip * Tighten tests and push annotation down * Tighten tests and push annotation down * Changelog * Add test * 7.0.1-SNAPSHOT bump * Error code * Add method used by CDR * add version enum * Fix test * Revert change to other safe version to stop problem with deprecated field * Rel 7 0 CVE (#5663) * Bump json-path * Pin parrson * Bump elastic * Bump spring version * Exclude JDBC * Descendent fix (#5669) * Fix "is-a" ValueSet expansion, add "descendent-of" support * Fixed tests in DSTU3 and R5 * Trigger new build * Revert "Trigger new build" This reverts commit 46c672b338db1f85c4f438344bfa828bfc723a37. * fix default partition setting on resource (#5617) * fix default partition setting on resource * changelog * Handle DEFAULT partition in rule checker. * Fix spotless --------- Co-authored-by: Michael Buckley Co-authored-by: James Agnew * pom bump, doc add, version enum add (#5616) Co-authored-by: Long Ma * fix default partition setting on resource (#5618) * fix default partition setting on resource * Handle DEFAULT partition in rule checker. 
Co-authored-by: Ken Stevens * Add setting to make broker not use JacksonMessageConverter (#5611) * Add setting to make broker not use JacksonMessageConverter * Add changelog * Implement suggestions --------- Co-authored-by: juan.marchionatto * Fix version * add changelog, add attribution, remove accidental bring-overs * add changelog, add attribution, remove accidental bring-overs * Finish jira section --------- Co-authored-by: Ole Hedegaard Co-authored-by: Ken Stevens Co-authored-by: Michael Buckley Co-authored-by: James Agnew Co-authored-by: longma1 <32119004+longma1@users.noreply.github.com> Co-authored-by: Long Ma Co-authored-by: jmarchionatto <60409882+jmarchionatto@users.noreply.github.com> Co-authored-by: juan.marchionatto * $poll-export-status operation with PatientIdPartitionInterceptor fails with NullPointerException (#5681) * Fix NullPointerException when performing a system bulk export in the presence of PatientIdPartitionInterceptor. * Fix NPE on -export-status operation with PatientIdPartitionInterceptor * 5654 measurescorer bug for denominator exception (#5677) * update measure bundles and tests for enforced populationId specification on Measure group resources * bump CR version to PRE17 * fix bug in versionEnumTest * Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5654-measurescorer-bug.yaml Co-authored-by: Tadgh --------- Co-authored-by: Tadgh * Revert Remote Terminology lookup operation to support all types of CodeSystem property and convert to them to string type. (#5698) * Revert Remote Terminology lookup operation to support all types of CodeSystem property and convert to them to string type. * Spotless fix * Address code review comments * version bump to 7.0.0 * pipeline fix * Update java home * Updating version to: 7.0.1 post release. * Bump fix. 
(#5710) * Backport: Searching with more than one chained Bundle SearchParameter returns i… (#5711) * Searching with more than one chained Bundle SearchParameter returns incorrect results (#5614) * Add backport info to changelog. --------- Co-authored-by: Nathan Doef * backport PR5589. (#5716) Co-authored-by: James Agnew * Backport 5722 into 7.0.1 (#5729) * Bugfix: removed reference to public schema in raw sql (#5722) * Bugfix: removed reference to public schema in raw sql * Add attribution for #5722 --------- Co-authored-by: James Agnew * Add backport tag * Build fix --------- Co-authored-by: pano-smals <135063507+pano-smals@users.noreply.github.com> * pom change to non-snapshot * Updating version to: 6.10.6 post release. * Reflected XSS vulnerability in Testpage overlay (#5719) * HAPI FHIR Testpage potential XSS vulnerability - fix * cherry-picked fix for 5671 searchparameter locks * Rel 6 4 mb update (#5741) * Add backport and folder * Add missing changelog * Cherry p icking * Update upgrade.md * added new version version.yaml * Provide default update time to batch2 entities (#4630) * Data migration - default value for batch2 UPDATE_TIME * fixed changelog, added 6.4.3 changelog folder * Spotless --------- Co-authored-by: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Co-authored-by: longma1 Co-authored-by: michaelabuckley * Added correction to generated resource providers (#5743) * Added correction to generated resource providers * add changelog and fix issue that was adding languange SP to version that does not have language as a SP * actually fix the test * fix changelog wrong issue number --------- Co-authored-by: Jens Kristian Villadsen Co-authored-by: Long Ma * added docs for 7.0.1 * Add accidentally removed migration * spotless * Reducer LOB usage in Batch2 and Search (#5748) * Reducer LOB usage in Batch2 and Search * Add changelog * Rework a number of LOB columns * Test fix * Test fix * Column fixes * Test fix * Formatting * 
Fixes * patching bad oracle test * Apply spotless --------- Co-authored-by: Tadgh * Fix backport line * Add changelog folder * Add knowledge of new version' * spotless * fix unintended backported change * Updating version to: 7.0.2 post release. * Updating version to: 6.8.8 post release. * Case sensitivity * merge backed ruleImlOp, not sure why previous 2 mergebacks didnt get this * cleanup * Updating version to: 7.0.3 post release. * fix changelog --------- Co-authored-by: Tadgh Co-authored-by: TipzCM Co-authored-by: leif stawnyczy Co-authored-by: Sam Gunter <123124187+samguntersmilecdr@users.noreply.github.com> Co-authored-by: peartree Co-authored-by: Brenin Rhodes Co-authored-by: chalmarm <44471040+chalmarm@users.noreply.github.com> Co-authored-by: Chalma Maadaadi Co-authored-by: StevenXLi Co-authored-by: Steven Li Co-authored-by: James Agnew Co-authored-by: Luke deGruchy Co-authored-by: Long Ma Co-authored-by: jmarchionatto <60409882+jmarchionatto@users.noreply.github.com> Co-authored-by: Zach Smith <85943952+zachdoctolib@users.noreply.github.com> Co-authored-by: Aleksej Parovysnik <100864000+alparodev@users.noreply.github.com> Co-authored-by: juan.marchionatto Co-authored-by: Ken Stevens Co-authored-by: Ken Stevens Co-authored-by: michaelabuckley Co-authored-by: Josie <80289977+pepsiofficial@users.noreply.github.com> Co-authored-by: josie Co-authored-by: TynerGjs <132295567+TynerGjs@users.noreply.github.com> Co-authored-by: jdar8 <69840459+jdar8@users.noreply.github.com> Co-authored-by: justindar Co-authored-by: markiantorno Co-authored-by: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> Co-authored-by: Thomas Papke Co-authored-by: dotasek Co-authored-by: Steve Corbett <137920358+steve-corbett-smilecdr@users.noreply.github.com> Co-authored-by: Nathan Doef Co-authored-by: Etienne Poirier <33007955+epeartree@users.noreply.github.com> Co-authored-by: Justin McKelvy <60718638+Capt-Mac@users.noreply.github.com> Co-authored-by: justin.mckelvy 
Co-authored-by: LalithE <132382565+LalithE@users.noreply.github.com> Co-authored-by: volodymyr Co-authored-by: Martha Mitran Co-authored-by: Nathan Doef Co-authored-by: dotasek Co-authored-by: JP Co-authored-by: Ole Hedegaard Co-authored-by: pano-smals <135063507+pano-smals@users.noreply.github.com> Co-authored-by: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Co-authored-by: Jens Kristian Villadsen --- .../java/ca/uhn/fhir/util/VersionEnum.java | 12 ++ .../uhn/hapi/fhir/changelog/6_10_3/upgrade.md | 0 .../hapi/fhir/changelog/6_10_3/version.yaml | 3 + .../uhn/hapi/fhir/changelog/6_10_4/upgrade.md | 0 .../hapi/fhir/changelog/6_10_4/version.yaml | 3 + .../uhn/hapi/fhir/changelog/6_4_2/upgrade.md | 2 + .../hapi/fhir/changelog/6_4_2/version.yaml | 3 + .../4636-fix-oracle-migration-problem.yaml | 4 + ...ected-and-version-missing-in-database.yaml | 7 + .../6_6_0/4584-reindex-orphan-index-rows.yaml | 1 + ...c-client-post-paging-requests-support.yaml | 1 + .../uhn/hapi/fhir/changelog/6_8_6/upgrade.md | 1 + .../hapi/fhir/changelog/6_8_6/version.yaml | 3 + .../uhn/hapi/fhir/changelog/6_8_7/upgrade.md | 6 + .../hapi/fhir/changelog/6_8_7/version.yaml | 3 + ...grate-database-in-dry-run-modifies-db.yaml | 2 +- .../5511-oracle-migration-create-index.yaml | 2 +- .../7_0_0/5563-mdm-dup-identifier.yaml | 1 + ...9-dont-contain-resources-with-fullurl.yaml | 1 + ...89-dont-encode-placeholder-ids-in-rdf.yaml | 2 + ...n-caffeine-cash-on-conditional-create.yaml | 1 + ...ndle-composition-searchparameters-fix.yaml | 1 + .../uhn/hapi/fhir/changelog/7_0_1/upgrade.md | 0 .../hapi/fhir/changelog/7_0_1/version.yaml | 3 + .../fhir/changelog/7_2_0/5671-tx-tangle.yaml | 1 + ...ed-xss-testpage-overlay-vulnerability.yaml | 1 + .../changelog/7_2_0/5722-fix-pg-schema.yaml | 1 + ...passed-to-generated-resource-provider.yaml | 3 +- ...-avoid-lob-usage-in-batch2-and-search.yaml | 1 + .../ca/uhn/fhir/jpa/auth/RuleImplOpTest.java | 135 ++++++++++++++++++ 
.../server/interceptor/auth/RuleImplOp.java | 11 +- 31 files changed, 211 insertions(+), 4 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/version.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/version.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/version.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_3/4636-fix-oracle-migration-problem.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4204-default-config-meta-tags-user-selected-and-version-missing-in-database.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/version.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/version.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/version.yaml create mode 100644 hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java index 43666973a15..1694426d1b7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java +++ 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java @@ -112,6 +112,7 @@ public enum VersionEnum { V6_2_5, // Dev Build V6_3_0, + V6_4_0, V6_4_1, V6_4_2, @@ -119,10 +120,13 @@ public enum VersionEnum { V6_4_4, V6_4_5, V6_4_6, + V6_5_0, + V6_6_0, V6_6_1, V6_6_2, + V6_7_0, V6_8_0, V6_8_1, @@ -130,16 +134,24 @@ public enum VersionEnum { V6_8_3, V6_8_4, V6_8_5, + V6_8_6, + V6_8_7, + V6_9_0, V6_10_0, V6_10_1, V6_10_2, V6_10_3, + V6_10_4, + V6_10_5, + V6_11_0, V7_0_0, V7_0_1, + V7_0_2, + V7_1_0, V7_2_0; diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/upgrade.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/version.yaml new file mode 100644 index 00000000000..3a71dcdc32d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_3/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2024-01-17" +codename: "Zed" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/upgrade.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/version.yaml new file mode 100644 index 00000000000..225ca4dca57 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_4/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2024-01-31" +codename: "Zed" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/upgrade.md new file mode 100644 index 00000000000..e899e2f7150 --- /dev/null +++ 
b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/upgrade.md @@ -0,0 +1,2 @@ +The known issue with Bulk Export in HAPI 6.4.0 has been resolved. Bulk export functionality is now +more performant at large scale, and does not generate occasional incomplete file reports. diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/version.yaml new file mode 100644 index 00000000000..fae61bdfc9c --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_2/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2023-02-27" +codename: "Wizard" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_3/4636-fix-oracle-migration-problem.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_3/4636-fix-oracle-migration-problem.yaml new file mode 100644 index 00000000000..e0df692ada6 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_3/4636-fix-oracle-migration-problem.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 4636 +title: "Fixed Oracle syntax problem with the fix that was previously provided via Pull Request 4630." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4204-default-config-meta-tags-user-selected-and-version-missing-in-database.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4204-default-config-meta-tags-user-selected-and-version-missing-in-database.yaml new file mode 100644 index 00000000000..2347ad58f75 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4204-default-config-meta-tags-user-selected-and-version-missing-in-database.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 4204 +jira: SMILE-4688 +backport: 6.4.2 +title: "With default configuration, Resource meta.tag properties: `userSelected` and `version`, were not stored in the database. + This is now fixed." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml index 2f23b4319a5..dfab2dfedbe 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml @@ -1,4 +1,5 @@ --- type: fix issue: 4548 +backport: 6.4.2 title: "Simultaneous DELETE and $reindex operations could corrupt the search index. This has been fixed." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5008-generic-client-post-paging-requests-support.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5008-generic-client-post-paging-requests-support.yaml index 286fd835d99..61640984d2a 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5008-generic-client-post-paging-requests-support.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5008-generic-client-post-paging-requests-support.yaml @@ -2,5 +2,6 @@ type: fix issue: 5008 jira: SMILE-6727 +backport: 6.4.6 title: "Previously, the GenericClient would only support the HTTP GET method for paging requests. This has been corrected by adding the possibility for paging with an HTTP POST." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/upgrade.md new file mode 100644 index 00000000000..68405650c08 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/upgrade.md @@ -0,0 +1 @@ +Fixed migration related issues diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/version.yaml new file mode 100644 index 00000000000..027fa473442 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_6/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2023-12-06" +codename: "Yucatán" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/upgrade.md new file mode 100644 index 00000000000..aca0c32d474 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/upgrade.md @@ -0,0 +1,6 @@ +This release strictly attempts to remove as much usage of the LOB table as possible in postgres. Specifically, in the JPA server, several database columns related to Batch2 jobs and searching +have been reworked so that they no will longer use LOB datatypes going forward. This +is a significant advantage on Postgresql databases as it removes a significant use +of the inefficient `pg_largeobject` table, and should yield performance boosts for +MSSQL as well. 
+ diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/version.yaml new file mode 100644 index 00000000000..e07f9c83c81 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_7/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2024-03-01" +codename: "Yucatán" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5486-cli-migrate-database-in-dry-run-modifies-db.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5486-cli-migrate-database-in-dry-run-modifies-db.yaml index 09409e91ff7..d46b96d5340 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5486-cli-migrate-database-in-dry-run-modifies-db.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5486-cli-migrate-database-in-dry-run-modifies-db.yaml @@ -2,6 +2,6 @@ type: fix issue: 5486 jira: SMILE-7457 -backport: 6.10.1 +backport: 6.8.6,6.10.1 title: "Previously, testing database migration with cli migrate-database command in dry-run mode would insert in the migration task table. The issue has been fixed." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5511-oracle-migration-create-index.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5511-oracle-migration-create-index.yaml index 2a4cd3e731d..7a8ace05128 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5511-oracle-migration-create-index.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5511-oracle-migration-create-index.yaml @@ -1,7 +1,7 @@ --- type: fix issue: 5511 -backport: 6.10.1 +backport: 6.8.6,6.10.1 title: "Previously, when creating an index as a part of a migration, if the index already existed with a different name on Oracle, the migration would fail. 
This has been fixed so that the create index migration task now recovers with a warning message if the index already exists with a different name." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5563-mdm-dup-identifier.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5563-mdm-dup-identifier.yaml index a823391599a..458c6a38f16 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5563-mdm-dup-identifier.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5563-mdm-dup-identifier.yaml @@ -1,5 +1,6 @@ --- type: fix issue: 5563 +backport: 6.10.3 title: "Previously, certain mdm configuration could lead to duplicate eid identifier entries in golden resources. This has been corrected" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-contain-resources-with-fullurl.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-contain-resources-with-fullurl.yaml index 45c8d424e90..3cde6a5e393 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-contain-resources-with-fullurl.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-contain-resources-with-fullurl.yaml @@ -1,6 +1,7 @@ --- type: fix issue: 5589 +backport: 6.10.5 title: "When encoding a Bundle, if resources in bundle entries had a value in `Bundle.entry.fullUrl` but no value in `Bundle.entry.resource.id`, the parser sometimes incorrectly moved these resources to be contained within other diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-encode-placeholder-ids-in-rdf.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-encode-placeholder-ids-in-rdf.yaml index 28bf0b687a3..0070685b1c7 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-encode-placeholder-ids-in-rdf.yaml 
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5589-dont-encode-placeholder-ids-in-rdf.yaml @@ -1,6 +1,8 @@ --- type: fix issue: 5589 +backport: 6.10.5 title: "When encoding resources using the RDF parser, placeholder IDs (i.e. resource IDs starting with `urn:`) were not omitted as they are in the XML and JSON parsers. This has been corrected." + diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5621-fix-potential-deadlock-on-caffeine-cash-on-conditional-create.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5621-fix-potential-deadlock-on-caffeine-cash-on-conditional-create.yaml index 5fb4b901ffe..1e203028c6b 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5621-fix-potential-deadlock-on-caffeine-cash-on-conditional-create.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5621-fix-potential-deadlock-on-caffeine-cash-on-conditional-create.yaml @@ -1,4 +1,5 @@ --- type: fix issue: 5621 +backport: 6.10.4 title: "Fixed a deadlock in resource conditional create." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5623-searching-with-multiple-bundle-composition-searchparameters-fix.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5623-searching-with-multiple-bundle-composition-searchparameters-fix.yaml index 1041359ccf0..9c99c5218d6 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5623-searching-with-multiple-bundle-composition-searchparameters-fix.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5623-searching-with-multiple-bundle-composition-searchparameters-fix.yaml @@ -1,5 +1,6 @@ --- type: fix issue: 5623 +backport: 6.10.5 title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. 
`Composition`) were only adding one condition to the underlying SQL query which resulted in incorrect search results. This has been fixed." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/upgrade.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/version.yaml new file mode 100644 index 00000000000..636214dfb22 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_1/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2024-03-01" +codename: "Zed" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5671-tx-tangle.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5671-tx-tangle.yaml index 0224e092a74..3b7adddb8f1 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5671-tx-tangle.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5671-tx-tangle.yaml @@ -1,4 +1,5 @@ --- type: fix issue: 5671 +backport: 7.0.1 title: "Avoid lock contention by refreshing SearchParameter cache in a new transaction." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5717-reflected-xss-testpage-overlay-vulnerability.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5717-reflected-xss-testpage-overlay-vulnerability.yaml index fdc50e6da87..afd73567796 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5717-reflected-xss-testpage-overlay-vulnerability.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5717-reflected-xss-testpage-overlay-vulnerability.yaml @@ -1,4 +1,5 @@ --- type: security issue: 5717 +backport: 7.0.1 title: "Fixed a potential XSS vulnerability in the HAPI FHIR Testpage Overlay module." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5722-fix-pg-schema.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5722-fix-pg-schema.yaml index 4b7fcd1ebf6..825ab170ac2 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5722-fix-pg-schema.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5722-fix-pg-schema.yaml @@ -1,6 +1,7 @@ --- type: fix issue: 5722 +backport: 7.0.1 title: "An incorrect migration script caused a failure when upgrading to HAPI FHIR 7.0.0 on PostgreSQL if the database was not in the `public` schema. Thanks to GitHub user @pano-smals for the contribution!" 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5742-new-language-searchparam-not-passed-to-generated-resource-provider.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5742-new-language-searchparam-not-passed-to-generated-resource-provider.yaml index 4c99e4c14e7..9205862a167 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5742-new-language-searchparam-not-passed-to-generated-resource-provider.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5742-new-language-searchparam-not-passed-to-generated-resource-provider.yaml @@ -1,4 +1,5 @@ --- type: fix issue: 5742 -title: Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution! +backport: 7.0.1 +title: "Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution!" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml index 1eff1ab7cef..6c3ca11cff9 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml @@ -1,6 +1,7 @@ --- type: perf issue: 5748 +backport: 6.8.7 title: "In the JPA server, several database columns related to Batch2 jobs and searching have been reworked so that they no will longer use LOB datatypes going forward. 
This is a significant advantage on Postgresql databases as it removes a significant use diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java new file mode 100644 index 00000000000..1f343735b83 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java @@ -0,0 +1,135 @@ +package ca.uhn.fhir.jpa.auth; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.api.RestOperationTypeEnum; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor; +import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule; +import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier; +import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum; +import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder; +import ca.uhn.fhir.util.BundleBuilder; +import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.Patient; +import org.hl7.fhir.r4.model.StringType; +import org.junit.jupiter.api.Test; + +import java.util.HashSet; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +public class RuleImplOpTest { + private static final String OPERATION = "operation"; + private static final String TYPE = "type"; + private static final String PATH = "path"; + private static final String VALUE = "value"; + private static final String REPLACE = "replace"; + private static final String 
PATIENT_BIRTH_DATE = "Patient.birthDate"; + private static final Parameters PARAMETERS = buildParameters(); + private static final String DOCUMENT = "document"; + private static final String ERROR_TEMPLATE = "HAPI-0339: Can not handle nested Parameters with %s operation"; + private static final String ERROR_UPDATE = String.format(ERROR_TEMPLATE, "UPDATE"); + private static final String ERROR_CREATE = String.format(ERROR_TEMPLATE, "CREATE"); + + private static final String REQUEST_RULELIST = AuthorizationInterceptor.class.getName() + "_1_RULELIST"; + private final Patient myPatient = buildPatient(); + + private final List myRules = new RuleBuilder() + .allow() + .transaction() + .withAnyOperation() + .andApplyNormalRules() + .andThen() + .allow() + .write() + .allResources() + .withAnyId() + .build(); + + private final IAuthRule myRule = myRules.get(0); + private final FhirContext myFhirContext = FhirContext.forR4Cached(); + private final IBaseBundle myInnerBundle = buildInnerBundler(myFhirContext); + + private final RequestDetails mySystemRequestDetails = buildSystemRequestDetails(myFhirContext, myRules); + private final IRuleApplier myRuleApplier = new AuthorizationInterceptor(); + + @Test + void testTransactionBundleUpdateWithParameters() { + final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); + bundleBuilder.addTransactionUpdateEntry(PARAMETERS); + + try { + applyRule(bundleBuilder.getBundle()); + fail("Expected an InvalidRequestException"); + } catch (InvalidRequestException exception) { + assertEquals(ERROR_UPDATE, exception.getMessage()); + } + } + + @Test + void testTransactionBundleWithNestedBundle() { + final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); + bundleBuilder.addTransactionCreateEntry(myInnerBundle); + + try { + applyRule(bundleBuilder.getBundle()); + fail("Expected an InvalidRequestException"); + } catch (InvalidRequestException exception) { + assertEquals(ERROR_CREATE, exception.getMessage()); + } + } + + 
@Test + void testTransactionBundlePatchWithParameters() { + final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); + bundleBuilder.addTransactionFhirPatchEntry(myPatient.getIdElement(), PARAMETERS); + + final AuthorizationInterceptor.Verdict verdict = applyRule(bundleBuilder.getBundle()); + + assertThat(verdict.getDecision(), equalTo(PolicyEnum.ALLOW)); + } + + private AuthorizationInterceptor.Verdict applyRule(IBaseBundle theBundle) { + return myRule.applyRule(RestOperationTypeEnum.TRANSACTION, mySystemRequestDetails, theBundle, myPatient.getIdElement(), myPatient, myRuleApplier, new HashSet<>(), null); + } + + private static Parameters buildParameters() { + final Parameters patch = new Parameters(); + + final Parameters.ParametersParameterComponent op = patch.addParameter().setName(OPERATION); + op.addPart().setName(TYPE).setValue(new CodeType(REPLACE)); + op.addPart().setName(PATH).setValue(new CodeType(PATIENT_BIRTH_DATE)); + op.addPart().setName(VALUE).setValue(new StringType("1912-04-14")); + + return patch; + } + + private static RequestDetails buildSystemRequestDetails(FhirContext theFhirContext, List theRules) { + final SystemRequestDetails systemRequestDetails = new SystemRequestDetails(); + systemRequestDetails.setFhirContext(theFhirContext); + systemRequestDetails.getUserData().put(REQUEST_RULELIST, theRules); + + return systemRequestDetails; + } + + private static Patient buildPatient() { + final Patient patient = new Patient(); + patient.setId(new IdType("Patient", "1")); + return patient; + } + + private static IBaseBundle buildInnerBundler(FhirContext theFhirContext) { + final BundleBuilder innerBundleBuilder = new BundleBuilder(theFhirContext); + innerBundleBuilder.setType(DOCUMENT); + return innerBundleBuilder.getBundle(); + } +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java index 
e6b08be356e..889341089fc 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java @@ -850,8 +850,16 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { return isBlank(url) || "/".equals(url); } + /** + * Ascertain whether this transaction request contains nested operations or nested transactions. + * This is done carefully because a bundle can contain a nested PATCH with Parameters, which is supported but + * a non-PATCH nested Parameters resource may be problematic. + * + * @return true if we should reject this request + */ private boolean isInvalidNestedParametersRequest( FhirContext theContext, BundleEntryParts theEntry, RestOperationTypeEnum theOperation) { + IBaseResource resource = theEntry.getResource(); if (resource == null) { return false; @@ -859,9 +867,10 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { RuntimeResourceDefinition resourceDefinition = theContext.getResourceDefinition(resource); final boolean isResourceParameters = PARAMETERS.equals(resourceDefinition.getName()); + final boolean isResourceBundle = BUNDLE.equals(resourceDefinition.getName()); final boolean isOperationPatch = theOperation == RestOperationTypeEnum.PATCH; - return isResourceParameters && !isOperationPatch; + return (isResourceParameters && !isOperationPatch) || isResourceBundle; } private void setTargetFromResourceId(RequestDetails theRequestDetails, FhirContext ctx, RuleTarget target) { From aeb42998643cdea6a5d02598a885b75195eb0eef Mon Sep 17 00:00:00 2001 From: Etienne Poirier <33007955+epeartree@users.noreply.github.com> Date: Fri, 19 Apr 2024 01:21:29 -0400 Subject: [PATCH 16/26] Postgres migrate from lob datatypes (#5837) * wip - before moving JpaEmbeddedDatabase to test-utilities * failing test * BinaryStorageEntity migration with associated test.
* BinaryStorageEntity migration with associated test - fix finger fumble. * TermConceptProperty Entity migration with associated test. * TermValueSetConcept Entity migration with associated test. * TermConcept Entity migration * fixing merge issue * spotless * adding error code 2513 for exception * adding tests. * spotless * adding unique message code for exceptions. * fixing test failure - 1 * fixing test failure - 2 * fixing test failure - 3 * fixing test failure - 4 * fixing test failure - 5 * troubleshooting test failure * migration for audit/transaction logs. * spotless * preparation for initial code review. * adding changelog. * files headers * addressing comments from first code review. * modifying RenameTableTask to drop table with newTableName if configured to do so. * modifications to pass tests. * passing all tests --------- Co-authored-by: peartree --- .../ca/uhn/fhir/interceptor/api/Pointcut.java | 28 ++++ .../java/ca/uhn/fhir/util/VersionEnum.java | 4 + .../HapiFlywayMigrateDatabaseCommandTest.java | 2 + .../resources/persistence_create_h2_340.sql | 1 + ...-postgres-migration-from-lob-datatype.yaml | 5 + ... 
DatabaseBinaryContentStorageSvcImpl.java} | 89 ++++++++--- .../jpa/dao/data/IBinaryStorageEntityDao.java | 6 +- .../ca/uhn/fhir/jpa/entity/TermConcept.java | 28 +++- .../fhir/jpa/entity/TermConceptProperty.java | 61 +++++-- .../fhir/jpa/entity/TermValueSetConcept.java | 15 +- .../tasks/HapiFhirJpaMigrationTasks.java | 56 +++++++ .../fhir/jpa/packages/JpaPackageCache.java | 2 +- .../jpa/entity/TermConceptPropertyTest.java | 81 ++++++++++ .../jpa/model/entity/BinaryStorageEntity.java | 58 +++++-- .../binstore/BinaryAccessProviderTest.java | 16 +- ...abaseBinaryContentStorageSvcImplTest.java} | 139 +++++++++++----- .../FilesystemBinaryStorageSvcImplTest.java | 38 ++--- .../NullBinaryStorageSvcImplTest.java | 28 ++-- .../r4/BinaryAccessProviderR4Test.java | 8 +- ...rateColumBlobTypeToBinaryTypeTaskTest.java | 94 +++++++++++ ...grateColumnClobTypeToTextTypeTaskTest.java | 87 ++++++++++ .../HapiEmbeddedDatabasesExtension.java | 11 ++ .../releases/V5_1_0/data/POSTGRES_9_4.sql | 7 +- .../releases/V5_4_0/data/POSTGRES_9_4.sql | 2 +- .../releases/V7_2_0/data/POSTGRES_9_4.sql | 4 + .../BaseBinaryStorageSvcImplTest.java | 2 +- .../jpa/embedded/HapiSchemaMigrationTest.java | 41 +---- .../MigrateColumBlobTypeToBinaryTypeTask.java | 85 ++++++++++ .../MigrateColumnClobTypeToTextTypeTask.java | 84 ++++++++++ .../jpa/migrate/taskdef/RenameTableTask.java | 151 ++++++++++++++++++ .../migrate/tasks/api/BaseMigrationTasks.java | 9 +- .../fhir/jpa/migrate/tasks/api/Builder.java | 22 +++ .../fhir/jpa/migrate/taskdef/BaseTest.java | 2 + ...lobTypeToBinaryTypeTaskDbSpecificTest.java | 42 +++++ ...nClobTypeToTextTypeTaskDbSpecificTest.java | 47 ++++++ .../RenameTableTaskDbSpecificTest.java | 45 ++++++ .../migrate/taskdef/RenameTableTaskTest.java | 66 ++++++++ .../BulkExportJobParametersValidator.java | 3 +- .../BulkExportJobParametersValidatorTest.java | 4 +- .../jpa/binary/api/IBinaryStorageSvc.java | 28 ++-- .../fhir/jpa/binary/api/StoredDetails.java | 18 +-- 
.../interceptor/BinaryStorageInterceptor.java | 58 ++++--- .../binary/provider/BinaryAccessProvider.java | 10 +- .../binary/svc/BaseBinaryStorageSvcImpl.java | 78 +++++---- .../binary/svc/NullBinaryStorageSvcImpl.java | 18 +-- .../FilesystemBinaryStorageSvcImpl.java | 19 +-- .../binstore/MemoryBinaryStorageSvcImpl.java | 13 +- .../StaticLogbackCaptureTestExtension.java | 19 +++ 48 files changed, 1436 insertions(+), 298 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5838-postgres-migration-from-lob-datatype.yaml rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/{DatabaseBlobBinaryStorageSvcImpl.java => DatabaseBinaryContentStorageSvcImpl.java} (64%) create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java rename hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/{DatabaseBlobBinaryStorageSvcImplTest.java => DatabaseBinaryContentStorageSvcImplTest.java} (52%) create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskTest.java create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskTest.java create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTask.java create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTask.java create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java create mode 100644 hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest.java create mode 100644 hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskDbSpecificTest.java create mode 100644 
hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskDbSpecificTest.java create mode 100644 hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index 8306311f503..d2c995a435e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -2951,7 +2951,9 @@ public enum Pointcut implements IPointcut { "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", "ca.uhn.fhir.jpa.util.SqlQueryList"), + @Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.") /** + * Deprecated but still supported. Will eventually be removed. Please use Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX * Binary Blob Prefix Assigning Hook: *

* Immediately before a binary blob is stored to its eventual data sink, this hook is called. @@ -2977,6 +2979,32 @@ public enum Pointcut implements IPointcut { "ca.uhn.fhir.rest.api.server.RequestDetails", "org.hl7.fhir.instance.model.api.IBaseResource"), + /** + * Binary Content Prefix Assigning Hook: + *

+ * Immediately before binary content is stored to its eventual data sink, this hook is called. + * This hook allows implementers to provide a prefix to the binary content's ID. + * This is helpful in cases where you want to identify this binary content for later retrieval outside of HAPI-FHIR. Note that allowable characters will depend on the specific storage sink being used. + *

    + *
  • + * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the + * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been + * pulled out of the servlet request. Note that the bean + * properties are not all guaranteed to be populated. + *
  • + *
  • + * org.hl7.fhir.instance.model.api.IBaseBinary - The binary resource that is about to be stored. + *
  • + *
+ *

+ * Hooks should return String, which represents the full prefix to be applied to the binary content. + *

+ */ + STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX( + String.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "org.hl7.fhir.instance.model.api.IBaseResource"), + /** * Storage Hook: * Invoked before a batch job is persisted to the database. diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java index 1694426d1b7..2a097912ce7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java @@ -159,4 +159,8 @@ public enum VersionEnum { VersionEnum[] values = VersionEnum.values(); return values[values.length - 1]; } + + public boolean isNewerThan(VersionEnum theVersionEnum) { + return ordinal() > theVersionEnum.ordinal(); + } } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiFlywayMigrateDatabaseCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiFlywayMigrateDatabaseCommandTest.java index 56cbd8dcc97..6ee9a3d99b4 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiFlywayMigrateDatabaseCommandTest.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiFlywayMigrateDatabaseCommandTest.java @@ -218,6 +218,8 @@ public class HapiFlywayMigrateDatabaseCommandTest { "-p", "SA" }; + assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE_BLOB")); + assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE")); assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE")); assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); App.main(args); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/resources/persistence_create_h2_340.sql b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/resources/persistence_create_h2_340.sql index f939e74ef33..ac18d620292 100644 --- 
a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/resources/persistence_create_h2_340.sql +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/resources/persistence_create_h2_340.sql @@ -60,6 +60,7 @@ create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varch create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID)); create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID)); create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID)); +create table HFJ_BINARY_STORAGE_BLOB ( BLOB_ID varchar(200) not null, BLOB_DATA blob not null, CONTENT_TYPE varchar(100) not null, BLOB_HASH varchar(128), PUBLISHED_DATE timestamp(6) not null, RESOURCE_ID varchar(100) not null, BLOB_SIZE bigint, primary key (BLOB_ID) ); create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID); create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID); create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID); diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5838-postgres-migration-from-lob-datatype.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5838-postgres-migration-from-lob-datatype.yaml new file mode 100644 index 00000000000..af0349653f5 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5838-postgres-migration-from-lob-datatype.yaml @@ -0,0 +1,5 @@ +--- +type: perf +issue: 5838 +title: 
"Migration of remaining database columns still using the LOB datatypes. This change effectively cuts all ties +with the inefficient `pg_largeobject` table." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java similarity index 64% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java index 7c5f0f5e092..8f79e37ecb9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java @@ -41,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -50,7 +51,7 @@ import java.util.Date; import java.util.Optional; @Transactional -public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { +public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImpl { @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; @@ -61,9 +62,9 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @Nonnull @Override @Transactional(propagation = Propagation.REQUIRED) - public StoredDetails storeBlob( + public StoredDetails storeBinaryContent( IIdType theResourceId, - String theBlobIdOrNull, + String theBinaryContentIdOrNull, String theContentType, InputStream theInputStream, RequestDetails theRequestDetails) @@ -82,14 
+83,20 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { BinaryStorageEntity entity = new BinaryStorageEntity(); entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue()); - entity.setBlobContentType(theContentType); + entity.setContentType(theContentType); entity.setPublished(publishedDate); Session session = (Session) myEntityManager.getDelegate(); LobHelper lobHelper = session.getLobHelper(); + byte[] loadedStream = IOUtils.toByteArray(countingInputStream); - String id = super.provideIdForNewBlob(theBlobIdOrNull, loadedStream, theRequestDetails, theContentType); - entity.setBlobId(id); + String id = super.provideIdForNewBinaryContent( + theBinaryContentIdOrNull, loadedStream, theRequestDetails, theContentType); + + entity.setContentId(id); + entity.setStorageContentBin(loadedStream); + + // TODO: remove writing Blob in a future release Blob dataBlob = lobHelper.createBlob(loadedStream); entity.setBlob(dataBlob); @@ -103,7 +110,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { myEntityManager.persist(entity); return new StoredDetails() - .setBlobId(id) + .setBinaryContentId(id) .setBytes(bytes) .setPublished(publishedDate) .setHash(hash) @@ -111,68 +118,98 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { } @Override - public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) { + public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBinaryContentId) { Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( - theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue()); if (entityOpt.isEmpty()) { return null; } BinaryStorageEntity entity = entityOpt.get(); return new StoredDetails() - .setBlobId(theBlobId) - .setContentType(entity.getBlobContentType()) + .setBinaryContentId(theBinaryContentId) + 
.setContentType(entity.getContentType()) .setHash(entity.getHash()) .setPublished(entity.getPublished()) .setBytes(entity.getSize()); } @Override - public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException { + public boolean writeBinaryContent(IIdType theResourceId, String theBinaryContentId, OutputStream theOutputStream) + throws IOException { Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( - theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue()); if (entityOpt.isEmpty()) { return false; } - copyBlobToOutputStream(theOutputStream, entityOpt.get()); + copyBinaryContentToOutputStream(theOutputStream, entityOpt.get()); return true; } @Override - public void expungeBlob(IIdType theResourceId, String theBlobId) { + public void expungeBinaryContent(IIdType theResourceId, String theBinaryContentId) { Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( - theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue()); entityOpt.ifPresent( - theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId())); + theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getContentId())); } @Override - public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException { + public byte[] fetchBinaryContent(IIdType theResourceId, String theBinaryContentId) throws IOException { BinaryStorageEntity entityOpt = myBinaryStorageEntityDao .findByIdAndResourceId( - theBlobId, theResourceId.toUnqualifiedVersionless().getValue()) + theBinaryContentId, + theResourceId.toUnqualifiedVersionless().getValue()) .orElseThrow(() -> new ResourceNotFoundException( - "Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId)); + "Unknown BinaryContent ID: " + 
theBinaryContentId + " for resource ID " + theResourceId)); - return copyBlobToByteArray(entityOpt); + return copyBinaryContentToByteArray(entityOpt); } - void copyBlobToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) throws IOException { - try (InputStream inputStream = theEntity.getBlob().getBinaryStream()) { + void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) + throws IOException { + + try (InputStream inputStream = getBinaryContent(theEntity)) { IOUtils.copy(inputStream, theOutputStream); } catch (SQLException e) { throw new IOException(Msg.code(1341) + e); } } - byte[] copyBlobToByteArray(BinaryStorageEntity theEntity) throws IOException { - try { - return ByteStreams.toByteArray(theEntity.getBlob().getBinaryStream()); + byte[] copyBinaryContentToByteArray(BinaryStorageEntity theEntity) throws IOException { + byte[] retVal; + + try (InputStream inputStream = getBinaryContent(theEntity)) { + retVal = ByteStreams.toByteArray(inputStream); } catch (SQLException e) { throw new IOException(Msg.code(1342) + e); } + + return retVal; + } + + /** + * + * The caller is responsible for closing the returned stream. 
+ * + * @param theEntity + * @return + * @throws SQLException + */ + private InputStream getBinaryContent(BinaryStorageEntity theEntity) throws SQLException { + InputStream retVal; + + if (theEntity.hasStorageContent()) { + retVal = new ByteArrayInputStream(theEntity.getStorageContentBin()); + } else if (theEntity.hasBlob()) { + retVal = theEntity.getBlob().getBinaryStream(); + } else { + retVal = new ByteArrayInputStream(new byte[0]); + } + + return retVal; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java index 6f76d4f72f4..c44c3b6c9fb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java @@ -29,11 +29,11 @@ import java.util.Optional; public interface IBinaryStorageEntityDao extends JpaRepository, IHapiFhirJpaRepository { - @Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id") + @Query("SELECT e FROM BinaryStorageEntity e WHERE e.myContentId = :content_id AND e.myResourceId = :resource_id") Optional findByIdAndResourceId( - @Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId); + @Param("content_id") String theContentId, @Param("resource_id") String theResourceId); @Modifying - @Query("DELETE FROM BinaryStorageEntity t WHERE t.myBlobId = :pid") + @Query("DELETE FROM BinaryStorageEntity t WHERE t.myContentId = :pid") void deleteByPid(@Param("pid") String theId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index 491eccd187b..57674d4cfc7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -43,12 +43,14 @@ import jakarta.persistence.SequenceGenerator; import jakarta.persistence.Table; import jakarta.persistence.Temporal; import jakarta.persistence.TemporalType; +import jakarta.persistence.Transient; import jakarta.persistence.UniqueConstraint; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.Length; import org.hibernate.search.engine.backend.types.Projectable; import org.hibernate.search.engine.backend.types.Searchable; import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef; @@ -56,7 +58,10 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderR import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue; import org.hl7.fhir.r4.model.Coding; import java.io.Serializable; @@ -68,6 +73,8 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import static java.util.Objects.isNull; +import static java.util.Objects.nonNull; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @@ -165,15 +172,14 @@ public class TermConcept implements Serializable { @Column(name = 
"INDEX_STATUS", nullable = true) private Long myIndexStatus; + @Deprecated(since = "7.2.0") @Lob @Column(name = "PARENT_PIDS", nullable = true) - @FullTextField( - name = "myParentPids", - searchable = Searchable.YES, - projectable = Projectable.YES, - analyzer = "conceptParentPidsAnalyzer") private String myParentPids; + @Column(name = "PARENT_PIDS_VC", nullable = true, length = Length.LONG32) + private String myParentPidsVc; + @OneToMany( cascade = {}, fetch = FetchType.LAZY, @@ -356,8 +362,15 @@ public class TermConcept implements Serializable { return this; } + @Transient + @FullTextField( + name = "myParentPids", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "conceptParentPidsAnalyzer") + @IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")})) public String getParentPidsAsString() { - return myParentPids; + return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids; } public List getParents() { @@ -437,7 +450,7 @@ public class TermConcept implements Serializable { @PreUpdate @PrePersist public void prePersist() { - if (myParentPids == null) { + if (isNull(myParentPids) && isNull(myParentPidsVc)) { Set parentPids = new HashSet<>(); TermConcept entity = this; parentPids(entity, parentPids); @@ -464,6 +477,7 @@ public class TermConcept implements Serializable { } public TermConcept setParentPids(String theParentPids) { + myParentPidsVc = theParentPids; myParentPids = theParentPids; return this; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java index 7d61d2db172..6bbc3b178a4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.util.ValidateUtil; 
+import com.google.common.annotations.VisibleForTesting; import jakarta.annotation.Nonnull; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -41,6 +42,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.Length; import org.hibernate.annotations.JdbcTypeCode; import org.hibernate.search.engine.backend.types.Projectable; import org.hibernate.search.engine.backend.types.Searchable; @@ -68,7 +70,7 @@ import static org.apache.commons.lang3.StringUtils.length; public class TermConceptProperty implements Serializable { public static final int MAX_PROPTYPE_ENUM_LENGTH = 6; private static final long serialVersionUID = 1L; - private static final int MAX_LENGTH = 500; + public static final int MAX_LENGTH = 500; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn( @@ -106,10 +108,14 @@ public class TermConceptProperty implements Serializable { @GenericField(name = "myValueString", searchable = Searchable.YES) private String myValue; + @Deprecated(since = "7.2.0") @Column(name = "PROP_VAL_LOB") @Lob() private byte[] myValueLob; + @Column(name = "PROP_VAL_BIN", nullable = true, length = Length.LONG32) + private byte[] myValueBin; + @Enumerated(EnumType.ORDINAL) @Column(name = "PROP_TYPE", nullable = false, length = MAX_PROPTYPE_ENUM_LENGTH) @JdbcTypeCode(SqlTypes.INTEGER) @@ -196,8 +202,8 @@ public class TermConceptProperty implements Serializable { * property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property. 
*/ public String getValue() { - if (hasValueLob()) { - return getValueLobAsString(); + if (hasValueBin()) { + return getValueBinAsString(); } return myValue; } @@ -208,36 +214,41 @@ public class TermConceptProperty implements Serializable { */ public TermConceptProperty setValue(String theValue) { if (theValue.length() > MAX_LENGTH) { - setValueLob(theValue); + setValueBin(theValue); } else { myValueLob = null; + myValueBin = null; } myValue = left(theValue, MAX_LENGTH); return this; } - public boolean hasValueLob() { + public boolean hasValueBin() { + if (myValueBin != null && myValueBin.length > 0) { + return true; + } + if (myValueLob != null && myValueLob.length > 0) { return true; } return false; } - public byte[] getValueLob() { - return myValueLob; - } - - public TermConceptProperty setValueLob(byte[] theValueLob) { - myValueLob = theValueLob; + public TermConceptProperty setValueBin(byte[] theValueBin) { + myValueBin = theValueBin; + myValueLob = theValueBin; return this; } - public TermConceptProperty setValueLob(String theValueLob) { - myValueLob = theValueLob.getBytes(StandardCharsets.UTF_8); - return this; + public TermConceptProperty setValueBin(String theValueBin) { + return setValueBin(theValueBin.getBytes(StandardCharsets.UTF_8)); } - public String getValueLobAsString() { + public String getValueBinAsString() { + if (myValueBin != null && myValueBin.length > 0) { + return new String(myValueBin, StandardCharsets.UTF_8); + } + return new String(myValueLob, StandardCharsets.UTF_8); } @@ -295,4 +306,24 @@ public class TermConceptProperty implements Serializable { public Long getPid() { return myId; } + + @VisibleForTesting + public byte[] getValueBlobForTesting() { + return myValueLob; + } + + @VisibleForTesting + public void setValueBlobForTesting(byte[] theValueLob) { + myValueLob = theValueLob; + } + + @VisibleForTesting + public byte[] getValueBinForTesting() { + return myValueBin; + } + + @VisibleForTesting + public void 
setValueBinForTesting(byte[] theValuebin) { + myValueBin = theValuebin; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java index 3920fe7f729..2e17bc6ae9c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java @@ -40,11 +40,13 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.Length; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import static org.apache.commons.lang3.StringUtils.isNotEmpty; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @@ -98,10 +100,14 @@ public class TermValueSetConcept implements Serializable { @Column(name = "SOURCE_PID", nullable = true) private Long mySourceConceptPid; + @Deprecated(since = "7.2.0") @Lob @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true) private String mySourceConceptDirectParentPids; + @Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true, length = Length.LONG32) + private String mySourceConceptDirectParentPidsVc; + @Column(name = "SYSTEM_URL", nullable = false, length = TermCodeSystem.MAX_URL_LENGTH) private String mySystem; @@ -264,7 +270,7 @@ public class TermValueSetConcept implements Serializable { .append("valueSetName", this.getValueSetName()) .append("display", myDisplay) .append("designationCount", myDesignations != null ? 
myDesignations.size() : "(null)") - .append("parentPids", mySourceConceptDirectParentPids) + .append("parentPids", getSourceConceptDirectParentPids()) .toString(); } @@ -282,5 +288,12 @@ public class TermValueSetConcept implements Serializable { public void setSourceConceptDirectParentPids(String theSourceConceptDirectParentPids) { mySourceConceptDirectParentPids = theSourceConceptDirectParentPids; + mySourceConceptDirectParentPidsVc = theSourceConceptDirectParentPids; + } + + public String getSourceConceptDirectParentPids() { + return isNotEmpty(mySourceConceptDirectParentPidsVc) + ? mySourceConceptDirectParentPidsVc + : mySourceConceptDirectParentPids; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 25094f71020..86e27081679 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -139,6 +139,62 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { forcedId.dropIndex("20240402.2", "IDX_FORCEDID_RESID"); forcedId.dropIndex("20240402.3", "IDX_FORCEDID_TYPE_FID"); forcedId.dropIndex("20240402.4", "IDX_FORCEID_FID"); + + // Migration from LOB + { + Builder.BuilderWithTableName binaryStorageBlobTable = version.onTable("HFJ_BINARY_STORAGE_BLOB"); + + binaryStorageBlobTable + .renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID") + .renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE") + .renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH"); + + binaryStorageBlobTable + .modifyColumn("20240404.4", "BLOB_DATA") + .nullable() + .withType(ColumnTypeEnum.BLOB); + + binaryStorageBlobTable + .addColumn("20240404.5", "STORAGE_CONTENT_BIN") + .nullable() + .type(ColumnTypeEnum.BINARY); + + 
binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN"); + + binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE"); + } + + { + Builder.BuilderWithTableName termConceptPropertyTable = version.onTable("TRM_CONCEPT_PROPERTY"); + + termConceptPropertyTable + .addColumn("20240409.1", "PROP_VAL_BIN") + .nullable() + .type(ColumnTypeEnum.BINARY); + + termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN"); + } + + { + Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT"); + termValueSetConceptTable + .addColumn("20240409.3", "SOURCE_DIRECT_PARENT_PIDS_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + + termValueSetConceptTable.migrateClobToText( + "20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC"); + } + + { + Builder.BuilderWithTableName termConceptTable = version.onTable("TRM_CONCEPT"); + termConceptTable + .addColumn("20240410.1", "PARENT_PIDS_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + + termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC"); + } } protected void init700() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java index 03d03b9cd0f..1d306f609ce 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java @@ -243,7 +243,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac */ private byte[] fetchBlobFromBinary(IBaseBinary theBinary) throws IOException { if (myBinaryStorageSvc != null && !(myBinaryStorageSvc instanceof NullBinaryStorageSvcImpl)) { - return myBinaryStorageSvc.fetchDataBlobFromBinary(theBinary); + return myBinaryStorageSvc.fetchDataByteArrayFromBinary(theBinary); } 
else { byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue(); if (value == null) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java new file mode 100644 index 00000000000..44a01649cfd --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java @@ -0,0 +1,81 @@ +package ca.uhn.fhir.jpa.entity; + +import com.google.common.base.Strings; +import org.junit.jupiter.api.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; + +public class TermConceptPropertyTest { + + private static final String ourVeryLongString = Strings.repeat("a", TermConceptProperty.MAX_LENGTH+1); + + @Test + public void testSetValue_whenValueExceedsMAX_LENGTH_willWriteToBlobAndBin(){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + + // when + termConceptProperty.setValue(ourVeryLongString); + + // then + assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue()); + assertThat(termConceptProperty.getValueBinForTesting(), notNullValue()); + } + + @Test + public void testHasValueBin_willDefaultToAssertingValueBin(){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes()); + termConceptProperty.setValueBlobForTesting(null); + + // when/then + assertThat(termConceptProperty.hasValueBin(), is(true)); + + } + + @Test + public void testHasValueBin_willAssertValueBlob_whenValueBinNotPresent(){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + termConceptProperty.setValueBinForTesting(null); + termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes()); + + 
// when/then + assertThat(termConceptProperty.hasValueBin(), is(true)); + + } + + @Test + public void testGetValue_whenValueExceedsMAX_LENGTH_willGetValueBinByDefault(){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes()); + termConceptProperty.setValueBlobForTesting(null); + + // when + String value = termConceptProperty.getValue(); + + // then + assertThat(value, startsWith("a")); + + } + + @Test + public void testGetValue_whenOnlyValueBlobIsSet_willGetValueValueBlob(){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + termConceptProperty.setValueBinForTesting(null); + termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes()); + + // when + String value = termConceptProperty.getValue(); + + // then + assertThat(value, startsWith("a")); + } + +} diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java index e5dd9152596..c71b8e29fda 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java @@ -26,37 +26,44 @@ import jakarta.persistence.Lob; import jakarta.persistence.Table; import jakarta.persistence.Temporal; import jakarta.persistence.TemporalType; +import org.hibernate.Length; import java.sql.Blob; import java.util.Date; +import static java.util.Objects.nonNull; + @Entity -@Table(name = "HFJ_BINARY_STORAGE_BLOB") +@Table(name = "HFJ_BINARY_STORAGE") public class BinaryStorageEntity { @Id - @Column(name = "BLOB_ID", length = 200, nullable = false) - // N.B GGG: Note that the `blob id` is the same as the `externalized binary id`. 
- private String myBlobId; + @Column(name = "CONTENT_ID", length = 200, nullable = false) + // N.B GGG: Note that the `content id` is the same as the `externalized binary id`. + private String myContentId; @Column(name = "RESOURCE_ID", length = 100, nullable = false) private String myResourceId; - @Column(name = "BLOB_SIZE", nullable = true) + @Column(name = "CONTENT_SIZE", nullable = true) private long mySize; @Column(name = "CONTENT_TYPE", nullable = false, length = 100) - private String myBlobContentType; + private String myContentType; - @Lob - @Column(name = "BLOB_DATA", nullable = false, insertable = true, updatable = false) + @Deprecated(since = "7.2.0") + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later + @Column(name = "BLOB_DATA", nullable = true, insertable = true, updatable = false) private Blob myBlob; + @Column(name = "STORAGE_CONTENT_BIN", nullable = true, length = Length.LONG32) + private byte[] myStorageContentBin; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "PUBLISHED_DATE", nullable = false) private Date myPublished; - @Column(name = "BLOB_HASH", length = 128, nullable = true) + @Column(name = "CONTENT_HASH", length = 128, nullable = true) private String myHash; public Date getPublished() { @@ -71,8 +78,8 @@ public class BinaryStorageEntity { return myHash; } - public void setBlobId(String theBlobId) { - myBlobId = theBlobId; + public void setContentId(String theContentId) { + myContentId = theContentId; } public void setResourceId(String theResourceId) { @@ -83,12 +90,12 @@ public class BinaryStorageEntity { return mySize; } - public String getBlobContentType() { - return myBlobContentType; + public String getContentType() { + return myContentType; } - public void setBlobContentType(String theBlobContentType) { - myBlobContentType = theBlobContentType; + public void setContentType(String theContentType) { + myContentType = theContentType; } public Blob getBlob() { @@ -99,8 +106,8 @@ public class BinaryStorageEntity { 
myBlob = theBlob; } - public String getBlobId() { - return myBlobId; + public String getContentId() { + return myContentId; } public void setSize(long theSize) { @@ -110,4 +117,21 @@ public class BinaryStorageEntity { public void setHash(String theHash) { myHash = theHash; } + + public byte[] getStorageContentBin() { + return myStorageContentBin; + } + + public BinaryStorageEntity setStorageContentBin(byte[] theStorageContentBin) { + myStorageContentBin = theStorageContentBin; + return this; + } + + public boolean hasStorageContent() { + return nonNull(myStorageContentBin); + } + + public boolean hasBlob() { + return nonNull(myBlob); + } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/BinaryAccessProviderTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/BinaryAccessProviderTest.java index 3e4062c5c67..1e61ecc21ee 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/BinaryAccessProviderTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/BinaryAccessProviderTest.java @@ -103,7 +103,7 @@ public class BinaryAccessProviderTest { ServletOutputStream sos = spy(ServletOutputStream.class); when(myDaoRegistry.getResourceDao(eq("DocumentReference"))).thenReturn(myResourceDao); when(myResourceDao.read(any(), any(), anyBoolean())).thenReturn(docRef); - when(myBinaryStorageSvc.fetchBlobDetails(any(), any())).thenReturn(blobDetails); + when(myBinaryStorageSvc.fetchBinaryContentDetails(any(), any())).thenReturn(blobDetails); when(theServletResponse.getOutputStream()).thenReturn(sos); myBinaryAccessProvider.setTargetAttachmentIdForUnitTest(true); @@ -111,8 +111,8 @@ public class BinaryAccessProviderTest { myBinaryAccessProvider.binaryAccessRead(docRef.getIdElement(), new StringType("DocumentReference.content.attachment"), myRequestDetails, theServletRequest, theServletResponse); } catch (IOException e) { } - verify(myBinaryStorageSvc, 
times(1)).fetchBlobDetails(any(), any()); - verify(myBinaryStorageSvc, times(1)).writeBlob(any(), any(), any()); + verify(myBinaryStorageSvc, times(1)).fetchBinaryContentDetails(any(), any()); + verify(myBinaryStorageSvc, times(1)).writeBinaryContent(any(), any(), any()); verify(theServletResponse, times(1)).setStatus(200); verify(theServletResponse, times(1)).setContentType(any()); verify(theServletResponse, times(1)).setContentLength(0); @@ -132,7 +132,7 @@ public class BinaryAccessProviderTest { } catch (InvalidRequestException | IOException e) { assertEquals(Msg.code(1331) + "Can not find the requested binary content. It may have been deleted.", e.getMessage()); } - verify(myBinaryStorageSvc, times(1)).fetchBlobDetails(any(), any()); + verify(myBinaryStorageSvc, times(1)).fetchBinaryContentDetails(any(), any()); } @Test @@ -247,16 +247,16 @@ public class BinaryAccessProviderTest { DaoMethodOutcome daoOutcome = new DaoMethodOutcome(); daoOutcome.setResource(docRef); StoredDetails sd = spy(StoredDetails.class); - sd.setBlobId("123"); + sd.setBinaryContentId("123"); sd.setBytes(15); when(myDaoRegistry.getResourceDao(eq("DocumentReference"))).thenReturn(myResourceDao); when(myResourceDao.read(any(), any(), anyBoolean())).thenReturn(docRef); when(myResourceDao.update(docRef, myRequestDetails)).thenReturn(daoOutcome); when(theServletRequest.getContentType()).thenReturn("Integer"); when(theServletRequest.getContentLength()).thenReturn(15); - when(myBinaryStorageSvc.shouldStoreBlob(15, docRef.getIdElement(), "Integer")).thenReturn(true); + when(myBinaryStorageSvc.shouldStoreBinaryContent(15, docRef.getIdElement(), "Integer")).thenReturn(true); myRequestDetails.setServletRequest(theServletRequest); - doReturn(sd).when(myBinaryStorageSvc).storeBlob(eq(docRef.getIdElement()), isNull(), eq("Integer"), any(InputStream.class), any(RequestDetails.class)); + doReturn(sd).when(myBinaryStorageSvc).storeBinaryContent(eq(docRef.getIdElement()), isNull(), eq("Integer"), 
any(InputStream.class), any(RequestDetails.class)); myRequestDetails.setRequestContents(SOME_BYTES); try { @@ -265,7 +265,7 @@ public class BinaryAccessProviderTest { assertEquals(docRef.getId(), outcome.getIdElement().getValue()); } catch (IOException e) { } - verify(myBinaryStorageSvc, times(1)).storeBlob(any(), any(), any(), any(), any(ServletRequestDetails.class)); + verify(myBinaryStorageSvc, times(1)).storeBinaryContent(any(), any(), any(), any(), any(ServletRequestDetails.class)); } @Test diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java similarity index 52% rename from hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImplTest.java rename to hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java index 9c887951d8f..a0c6b7c4279 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.binstore; import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; import ca.uhn.fhir.jpa.binary.api.StoredDetails; +import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao; import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; @@ -20,9 +21,11 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.sql.Blob; import java.sql.SQLException; +import java.util.Optional; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.matchesPattern; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -32,16 +35,21 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -@ContextConfiguration(classes = DatabaseBlobBinaryStorageSvcImplTest.MyConfig.class) -public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { +@ContextConfiguration(classes = DatabaseBinaryContentStorageSvcImplTest.MyConfig.class) +public class DatabaseBinaryContentStorageSvcImplTest extends BaseJpaR4Test { private static final byte[] SOME_BYTES = {2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3, 2, 1}; @Autowired - @Qualifier("databaseBlobBinaryStorageSvc") + @Qualifier("databaseBinaryContentStorageSvc") private IBinaryStorageSvc mySvc; + @Autowired + private IBinaryStorageEntityDao myBinaryStorageEntityDao; + @Test public void testStoreAndRetrieve() throws IOException { @@ -53,7 +61,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); String contentType = "image/png"; IdType resourceId = new IdType("Binary/123"); - StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails()); + StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails()); myCaptureQueriesListener.logAllQueriesForCurrentThread(); @@ -63,16 +71,16 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { myCaptureQueriesListener.clear(); - assertThat(outcome.getBlobId(), matchesPattern("^[a-zA-Z0-9]{100}$")); + 
assertThat(outcome.getBinaryContentId(), matchesPattern("^[a-zA-Z0-9]{100}$")); assertEquals(16, outcome.getBytes()); /* * Read back the details */ - StoredDetails details = mySvc.fetchBlobDetails(resourceId, outcome.getBlobId()); + StoredDetails details = mySvc.fetchBinaryContentDetails(resourceId, outcome.getBinaryContentId()); assertEquals(16L, details.getBytes()); - assertEquals(outcome.getBlobId(), details.getBlobId()); + assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId()); assertEquals("image/png", details.getContentType()); assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash()); assertNotNull(details.getPublished()); @@ -82,10 +90,10 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { */ ByteArrayOutputStream capture = new ByteArrayOutputStream(); - mySvc.writeBlob(resourceId, outcome.getBlobId(), capture); + mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture); assertArrayEquals(SOME_BYTES, capture.toByteArray()); - assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(resourceId, outcome.getBlobId())); + assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(resourceId, outcome.getBinaryContentId())); } @Test @@ -106,8 +114,8 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); String contentType = "image/png"; IdType resourceId = new IdType("Binary/123"); - StoredDetails outcome = mySvc.storeBlob(resourceId, "ABCDEFG", contentType, inputStream, new ServletRequestDetails()); - assertEquals("ABCDEFG", outcome.getBlobId()); + StoredDetails outcome = mySvc.storeBinaryContent(resourceId, "ABCDEFG", contentType, inputStream, new ServletRequestDetails()); + assertEquals("ABCDEFG", outcome.getBinaryContentId()); myCaptureQueriesListener.logAllQueriesForCurrentThread(); @@ -123,9 +131,9 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { * 
Read back the details */ - StoredDetails details = mySvc.fetchBlobDetails(resourceId, outcome.getBlobId()); + StoredDetails details = mySvc.fetchBinaryContentDetails(resourceId, outcome.getBinaryContentId()); assertEquals(16L, details.getBytes()); - assertEquals(outcome.getBlobId(), details.getBlobId()); + assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId()); assertEquals("image/png", details.getContentType()); assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash()); assertNotNull(details.getPublished()); @@ -135,43 +143,42 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { */ ByteArrayOutputStream capture = new ByteArrayOutputStream(); - mySvc.writeBlob(resourceId, outcome.getBlobId(), capture); + mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture); assertArrayEquals(SOME_BYTES, capture.toByteArray()); - assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(resourceId, outcome.getBlobId())); + assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(resourceId, outcome.getBinaryContentId())); } @Test - public void testFetchBlobUnknown() throws IOException { + public void testFetchBinaryContentUnknown() throws IOException { try { - mySvc.fetchBlob(new IdType("Patient/123"), "1111111"); + mySvc.fetchBinaryContent(new IdType("Patient/123"), "1111111"); fail(); } catch (ResourceNotFoundException e) { - assertEquals("Unknown blob ID: 1111111 for resource ID Patient/123", e.getMessage()); + assertEquals("Unknown BinaryContent ID: 1111111 for resource ID Patient/123", e.getMessage()); } - StoredDetails details = mySvc.fetchBlobDetails(new IdType("Patient/123"), "1111111"); + StoredDetails details = mySvc.fetchBinaryContentDetails(new IdType("Patient/123"), "1111111"); assertNull(details); } @Test public void testExpunge() throws IOException { - /* * Store the binary */ ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); String contentType = 
"image/png"; IdType resourceId = new IdType("Binary/123"); - StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails()); - String blobId = outcome.getBlobId(); + StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails()); + String blobId = outcome.getBinaryContentId(); // Expunge - mySvc.expungeBlob(resourceId, blobId); + mySvc.expungeBinaryContent(resourceId, blobId); ByteArrayOutputStream capture = new ByteArrayOutputStream(); - assertFalse(mySvc.writeBlob(resourceId, outcome.getBlobId(), capture)); + assertFalse(mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture)); assertEquals(0, capture.size()); } @@ -179,30 +186,29 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { @Test public void testWrongResourceId() throws IOException { - /* * Store the binary */ ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); String contentType = "image/png"; IdType resourceId = new IdType("Binary/123"); - StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails()); + StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails()); // Right ID ByteArrayOutputStream capture = new ByteArrayOutputStream(); - assertTrue(mySvc.writeBlob(resourceId, outcome.getBlobId(), capture)); + assertTrue(mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture)); assertEquals(16, capture.size()); // Wrong ID capture = new ByteArrayOutputStream(); - assertFalse(mySvc.writeBlob(new IdType("Patient/9999"), outcome.getBlobId(), capture)); + assertFalse(mySvc.writeBinaryContent(new IdType("Patient/9999"), outcome.getBinaryContentId(), capture)); assertEquals(0, capture.size()); } @Test - public void testCopyBlobToOutputStream_Exception() throws SQLException { - DatabaseBlobBinaryStorageSvcImpl 
svc = new DatabaseBlobBinaryStorageSvcImpl(); + public void testCopyBinaryContentToOutputStream_Exception() throws SQLException { + DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl(); BinaryStorageEntity mockInput = new BinaryStorageEntity(); Blob blob = mock(Blob.class); @@ -210,7 +216,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { mockInput.setBlob(blob); try { - svc.copyBlobToOutputStream(new ByteArrayOutputStream(), (mockInput)); + svc.copyBinaryContentToOutputStream(new ByteArrayOutputStream(), (mockInput)); fail(); } catch (IOException e) { assertThat(e.getMessage(), containsString("FOO")); @@ -218,8 +224,8 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { } @Test - public void testCopyBlobToByteArray_Exception() throws SQLException { - DatabaseBlobBinaryStorageSvcImpl svc = new DatabaseBlobBinaryStorageSvcImpl(); + public void testCopyBinaryContentToByteArray_Exception() throws SQLException { + DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl(); BinaryStorageEntity mockInput = new BinaryStorageEntity(); Blob blob = mock(Blob.class); @@ -227,20 +233,81 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test { mockInput.setBlob(blob); try { - svc.copyBlobToByteArray(mockInput); + svc.copyBinaryContentToByteArray(mockInput); fail(); } catch (IOException e) { assertThat(e.getMessage(), containsString("FOO")); } } + @Test + public void testReadBinaryStorageEntity_whenHasBinaryContent_defaultsToBinaryContent() throws IOException { + // given + DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl(); + + BinaryStorageEntity mockInput = mock(BinaryStorageEntity.class); + when(mockInput.hasStorageContent()).thenReturn(true); + when(mockInput.getStorageContentBin()).thenReturn(SOME_BYTES); + + // when + svc.copyBinaryContentToByteArray(mockInput); + + // then + verify(mockInput, times(0)).hasBlob(); 
+ verify(mockInput, times(0)).getBlob(); + + } + + @Test + public void testReadBinaryStorageEntity_whenHasBlobOnly_willReadBlobContent() throws IOException { + // given + DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl(); + + BinaryStorageEntity mockInput = mock(BinaryStorageEntity.class); + when(mockInput.hasStorageContent()).thenReturn(false); + when(mockInput.hasBlob()).thenReturn(true); + when(mockInput.getBlob()).thenAnswer(t ->{ + Blob blob = mock(Blob.class); + when(blob.getBinaryStream()).thenReturn(new ByteArrayInputStream(SOME_BYTES)); + return blob; + } ); + + // when + svc.copyBinaryContentToByteArray(mockInput); + + // then + verify(mockInput, times(1)).hasBlob(); + verify(mockInput, times(1)).getBlob(); + } + + @Test + public void testStoreBinaryContent_writesBlobAndByteArray() throws IOException { + // given + ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); + String contentType = "image/png"; + IdType resourceId = new IdType("Binary/123"); + + // when + StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails()); + + runInTransaction(() -> { + Optional binaryStorageEntityOptional = myBinaryStorageEntityDao.findByIdAndResourceId(outcome.getBinaryContentId(), resourceId.toUnqualifiedVersionless().getValue()); + BinaryStorageEntity binaryStorageEntity = binaryStorageEntityOptional.get(); + + // then + assertThat(binaryStorageEntity.hasStorageContent(), is(true)); + assertThat(binaryStorageEntity.hasBlob(), is(true)); + }); + + } + @Configuration public static class MyConfig { @Primary @Bean - public IBinaryStorageSvc databaseBlobBinaryStorageSvc() { - return new DatabaseBlobBinaryStorageSvcImpl(); + public IBinaryStorageSvc databaseBinaryContentStorageSvc() { + return new DatabaseBinaryContentStorageSvcImpl(); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImplTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImplTest.java index aa2af639fe7..a511cce7400 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImplTest.java @@ -50,22 +50,22 @@ public class FilesystemBinaryStorageSvcImplTest { public void testStoreAndRetrieve() throws IOException { IIdType id = new IdType("Patient/123"); String contentType = "image/png"; - StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); + StoredDetails outcome = mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); ourLog.info("Got id: {}", outcome); - StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId()); + StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId()); assertEquals(16L, details.getBytes()); - assertEquals(outcome.getBlobId(), details.getBlobId()); + assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId()); assertEquals("image/png", details.getContentType()); assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash()); assertNotNull(details.getPublished()); ByteArrayOutputStream capture = new ByteArrayOutputStream(); - mySvc.writeBlob(id, outcome.getBlobId(), capture); + mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture); assertArrayEquals(SOME_BYTES, capture.toByteArray()); - assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(id, outcome.getBlobId())); + assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(id, outcome.getBinaryContentId())); } @Test @@ -73,30 +73,30 @@ public class FilesystemBinaryStorageSvcImplTest { IIdType id = new IdType("Patient/123"); String contentType = "image/png"; String blobId = 
"ABCDEFGHIJKLMNOPQRSTUV"; - StoredDetails outcome = mySvc.storeBlob(id, blobId, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); - assertEquals(blobId, outcome.getBlobId()); + StoredDetails outcome = mySvc.storeBinaryContent(id, blobId, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); + assertEquals(blobId, outcome.getBinaryContentId()); ourLog.info("Got id: {}", outcome); - StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId()); + StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId()); assertEquals(16L, details.getBytes()); - assertEquals(outcome.getBlobId(), details.getBlobId()); + assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId()); assertEquals("image/png", details.getContentType()); assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash()); assertNotNull(details.getPublished()); ByteArrayOutputStream capture = new ByteArrayOutputStream(); - mySvc.writeBlob(id, outcome.getBlobId(), capture); + mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture); assertArrayEquals(SOME_BYTES, capture.toByteArray()); - assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(id, outcome.getBlobId())); + assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(id, outcome.getBinaryContentId())); } @Test - public void testFetchBlobUnknown() throws IOException { + public void testFetchBinaryContentUnknown() throws IOException { try { - mySvc.fetchBlob(new IdType("Patient/123"), "1111111"); + mySvc.fetchBinaryContent(new IdType("Patient/123"), "1111111"); fail(); } catch (ResourceNotFoundException e) { assertEquals(Msg.code(1327) + "Unknown blob ID: 1111111 for resource ID Patient/123", e.getMessage()); @@ -108,21 +108,21 @@ public class FilesystemBinaryStorageSvcImplTest { public void testExpunge() throws IOException { IIdType id = new IdType("Patient/123"); String contentType = "image/png"; - 
StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); + StoredDetails outcome = mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); ourLog.info("Got id: {}", outcome); - StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId()); + StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId()); assertEquals(16L, details.getBytes()); - assertEquals(outcome.getBlobId(), details.getBlobId()); + assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId()); assertEquals("image/png", details.getContentType()); assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash()); assertNotNull(details.getPublished()); - mySvc.expungeBlob(id, outcome.getBlobId()); + mySvc.expungeBinaryContent(id, outcome.getBinaryContentId()); ByteArrayOutputStream capture = new ByteArrayOutputStream(); - mySvc.writeBlob(id, outcome.getBlobId(), capture); + mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture); assertEquals(0, capture.size()); } @@ -134,7 +134,7 @@ public class FilesystemBinaryStorageSvcImplTest { IIdType id = new IdType("Patient/123"); String contentType = "image/png"; try { - mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); + mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails()); fail(); } catch (PayloadTooLargeException e) { assertEquals(Msg.code(1343) + "Binary size exceeds maximum: 5", e.getMessage()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/NullBinaryStorageSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/NullBinaryStorageSvcImplTest.java index 052c0dffff7..bcd19db546f 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/NullBinaryStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/NullBinaryStorageSvcImplTest.java @@ -12,37 +12,37 @@ public class NullBinaryStorageSvcImplTest { private final NullBinaryStorageSvcImpl mySvc = new NullBinaryStorageSvcImpl(); @Test - public void shouldStoreBlob() { - assertFalse(mySvc.shouldStoreBlob(1, new IdType("Patient/2"), "application/json")); + public void shouldStoreBinaryContent() { + assertFalse(mySvc.shouldStoreBinaryContent(1, new IdType("Patient/2"), "application/json")); } @Test - public void storeBlob() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.storeBlob(null, null, null, null, null)); + public void storeBinaryContent() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.storeBinaryContent(null, null, null, null, null)); } @Test - public void fetchBlobDetails() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlobDetails(null, null)); + public void fetchBinaryContentDetails() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBinaryContentDetails(null, null)); } @Test - public void writeBlob() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.writeBlob(null, null, null)); + public void writeBinaryContent() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.writeBinaryContent(null, null, null)); } @Test - public void expungeBlob() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.expungeBlob(null, null)); + public void expungeBinaryContent() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.expungeBinaryContent(null, null)); } @Test - public void fetchBlob() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlob(null, null)); + public void fetchBinaryContent() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBinaryContent(null, null)); } @Test 
- public void newBlobId() { - assertThrows(UnsupportedOperationException.class, () -> mySvc.newBlobId()); + public void newBinaryContentId() { + assertThrows(UnsupportedOperationException.class, () -> mySvc.newBinaryContentId()); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryAccessProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryAccessProviderR4Test.java index 4b0a31b647a..2f1d7f77d38 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryAccessProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryAccessProviderR4Test.java @@ -264,7 +264,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test { } - myBinaryStorageSvc.expungeBlob(id, attachmentId); + myBinaryStorageSvc.expungeBinaryContent(id, attachmentId); path = myServerBase + "/DocumentReference/" + id.getIdPart() + "/" + @@ -721,7 +721,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test { } ByteArrayOutputStream capture = new ByteArrayOutputStream(); - myStorageSvc.writeBlob(id, attachmentId, capture); + myStorageSvc.writeBinaryContent(id, attachmentId, capture); assertEquals(15, capture.size()); // Now delete (logical delete- should not expunge the binary) @@ -734,7 +734,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test { } capture = new ByteArrayOutputStream(); - myStorageSvc.writeBlob(id, attachmentId, capture); + myStorageSvc.writeBinaryContent(id, attachmentId, capture); assertEquals(15, capture.size()); // Now expunge @@ -748,7 +748,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test { .execute(); capture = new ByteArrayOutputStream(); - assertFalse(myStorageSvc.writeBlob(id, attachmentId, capture)); + assertFalse(myStorageSvc.writeBinaryContent(id, attachmentId, capture)); assertEquals(0, capture.size()); } diff --git 
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskTest.java new file mode 100644 index 00000000000..4befd20c04f --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskTest.java @@ -0,0 +1,94 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase; +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import ca.uhn.fhir.jpa.migrate.HapiMigrator; +import ca.uhn.fhir.jpa.migrate.SchemaMigrator; +import ca.uhn.fhir.util.VersionEnum; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class MigrateColumBlobTypeToBinaryTypeTaskTest { + + private static PostgresEmbeddedDatabase ourPostgresEmbeddedDatabase; + + @BeforeAll + public static void beforeAll(){ + ourPostgresEmbeddedDatabase = new PostgresEmbeddedDatabase(); + } + + private HapiMigrator myMigrator; + + @BeforeEach + public void beforeEach(){ + myMigrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, ourPostgresEmbeddedDatabase.getDataSource(), DriverTypeEnum.POSTGRES_9_4); + myMigrator.createMigrationTableIfRequired(); + } + + @Test + public void testMigrationTask_OidValueIsCopiedIntoBytea(){ + // given + final String expectedString ="Hello world!"; + + ourPostgresEmbeddedDatabase.executeSqlAsBatch(List.of( + "create table HFJ_STORAGE_WITH_BLOB (BLOB_DATA oid, STORAGE_CONTENT_BIN bytea)", + "select lo_create(1234)", // create empty LOB with id 1234 
+ "select lo_put(1234, 0, '\\x48656c6c6f20776f726c6421')", // insert data (Hello world!) in the LOB with id 1234 + "insert into HFJ_STORAGE_WITH_BLOB (BLOB_DATA) values (1234)" // assign LOB id to colum + )); + + // when + BaseTask task = new MigrateColumBlobTypeToBinaryTypeTask( + VersionEnum.V7_2_0.toString(), + "1", + "HFJ_STORAGE_WITH_BLOB", + "BLOB_DATA", // colum of oid type + "STORAGE_CONTENT_BIN" // colum of bytea type + ); + + myMigrator.addTask(task); + myMigrator.migrate(); + + // then + List> rows = ourPostgresEmbeddedDatabase.query("select * from HFJ_STORAGE_WITH_BLOB"); + + assertThat(rows, hasSize(1)); + + Map stringObjectMap = rows.get(0); + + String storedContent = convertToString(stringObjectMap, "storage_content_bin"); + + assertThat(storedContent, equalTo(expectedString)); + + } + + public String convertToString(Map theMap, String theKey){ + Object o = theMap.get(theKey); + + byte[] convertedOidToBytea = ((byte[])o); + return new String(convertedOidToBytea); + } + + @AfterEach + public void afterEach(){ + ourPostgresEmbeddedDatabase.clearDatabase(); + } + + @AfterAll + public static void afterAll(){ + + ourPostgresEmbeddedDatabase.stop(); + + } + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskTest.java new file mode 100644 index 00000000000..4a3117c3f76 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskTest.java @@ -0,0 +1,87 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase; +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import ca.uhn.fhir.jpa.migrate.HapiMigrator; +import ca.uhn.fhir.jpa.migrate.SchemaMigrator; +import ca.uhn.fhir.util.VersionEnum; +import org.junit.jupiter.api.AfterAll; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class MigrateColumnClobTypeToTextTypeTaskTest { + + private static PostgresEmbeddedDatabase ourPostgresEmbeddedDatabase; + + @BeforeAll + public static void beforeAll(){ + ourPostgresEmbeddedDatabase = new PostgresEmbeddedDatabase(); + } + + private HapiMigrator myMigrator; + + @BeforeEach + public void beforeEach(){ + myMigrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, ourPostgresEmbeddedDatabase.getDataSource(), DriverTypeEnum.POSTGRES_9_4); + myMigrator.createMigrationTableIfRequired(); + } + + @Test + public void testMigrationTask_OidValueIsCopiedIntoText(){ + // given + final String expectedString ="Hello world!"; + + ourPostgresEmbeddedDatabase.executeSqlAsBatch(List.of( + "create table HFJ_STORAGE_WITH_OID (OID_DATA oid, STORAGE_CONTENT_TEXT text)", + "select lo_create(1234)", // create empty LOB with id 1234 + "select lo_put(1234, 0, 'Hello world!')", // insert data in the LOB with id 1234 + "insert into HFJ_STORAGE_WITH_OID (OID_DATA) values (1234)" // assign LOB id to colum + )); + + // when + BaseTask task = new MigrateColumnClobTypeToTextTypeTask( + VersionEnum.V7_2_0.toString(), + "1", + "HFJ_STORAGE_WITH_OID", + "OID_DATA", // colum of oid type + "STORAGE_CONTENT_TEXT" // colum of bytea type + ); + + myMigrator.addTask(task); + myMigrator.migrate(); + + // then + List> rows = ourPostgresEmbeddedDatabase.query("select * from HFJ_STORAGE_WITH_OID"); + + assertThat(rows, hasSize(1)); + + Map stringObjectMap = rows.get(0); + + String storedContent = (String) stringObjectMap.get("storage_content_text"); + + assertThat(storedContent, equalTo(expectedString)); + + } + + @AfterEach + 
public void afterEach(){ + ourPostgresEmbeddedDatabase.clearDatabase(); + } + + @AfterAll + public static void afterAll(){ + + ourPostgresEmbeddedDatabase.stop(); + + } + +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java index 8d81ef4c03e..f25a382bb64 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java @@ -104,6 +104,17 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback { myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType), theVersionEnum); } + public void maybeInsertPersistenceTestData(DriverTypeEnum theDriverType, VersionEnum theVersionEnum) { + try { + myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType), theVersionEnum); + } catch (Exception theE) { + ourLog.info( + "Could not insert persistence test data most likely because we don't have any for version {} and driver {}", + theVersionEnum, + theDriverType); + } + } + public String getSqlFromResourceFile(String theFileName) { try { ourLog.info("Loading file: {}", theFileName); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_1_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_1_0/data/POSTGRES_9_4.sql index e5b708d5543..a4de9c25a99 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_1_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_1_0/data/POSTGRES_9_4.sql @@ -48,7 +48,7 @@ INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, INSERT INTO 
HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1780, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:08.82', '2023-05-01 15:26:08.82', FALSE, 'F30D68F5D00D440BD35E9CF7CCF1250234106A4B52AE016ACEE0F4291FB5AF5F', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CODESYSTEM', 1); INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1788, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.178', '2023-05-01 15:26:12.178', FALSE, 'EDBD79B1632719D08AA733AF7F99DE17009D1CE4605F439B4F5BDAAC98AABDD8', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'VALUESET', 1); INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1796, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.422', '2023-05-01 15:26:12.422', FALSE, 'A71FACD8BB1828ACD8718959949D5A16BE31F2E350671C5326A8E0BB9799357E', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CONCEPTMAP', 1); -INSERT INTO HFJ_BINARY_STORAGE_BLOB (BLOB_ID, BLOB_DATA, CONTENT_TYPE, BLOB_HASH, PUBLISHED_DATE, RESOURCE_ID, BLOB_SIZE) VALUES 
('QE7FP9VMEDPQTKOL9ENJXJEXBCIPTZKP2WOGWLMGDKJMKWXONGLLWBCUGDM2YH88ZXSG6V5PGDTSMQDMOXHEXBEFNOE0GIURJPOD', 1415364, 'APPLICATION/XML', 'D28743293C2FB67B72D06C80D055B66CCB58D70030E460450CD661AE2ED7225E', '2023-05-01 16:15:16.469', 'BINARY/2003', 36781); +INSERT INTO HFJ_BINARY_STORAGE_BLOB (BLOB_ID, BLOB_DATA, CONTENT_TYPE, BLOB_HASH, PUBLISHED_DATE, RESOURCE_ID, BLOB_SIZE) VALUES ('QE7FP9VMEDPQTKOL9ENJXJEXBCIPTZKP2WOGWLMGDKJMKWXONGLLWBCUGDM2YH88ZXSG6V5PGDTSMQDMOXHEXBEFNOE0GIURJPOD', lo_create(1415364), 'APPLICATION/XML', 'D28743293C2FB67B72D06C80D055B66CCB58D70030E460450CD661AE2ED7225E', '2023-05-01 16:15:16.469', 'BINARY/2003', 36781); INSERT INTO HFJ_BLK_EXPORT_JOB (PID, CREATED_TIME, EXP_TIME, JOB_ID, REQUEST, EXP_SINCE, JOB_STATUS, STATUS_MESSAGE, STATUS_TIME, OPTLOCK) VALUES (1, '2023-05-01 16:31:20.175', '2023-05-01 18:31:20.175', '077DCFC3-DDC1-4C83-BD15-82867912F79D', '/$EXPORT?_OUTPUTFORMAT=APPLICATION%2FFHIR%2BNDJSON', '2023-04-30 16:31:20.171', 'COMPLETE', NULL, '2023-05-01 16:31:36.011', 1); INSERT INTO HFJ_BLK_EXPORT_COLLECTION (PID, TYPE_FILTER, RES_TYPE, OPTLOCK, JOB_PID) VALUES (131, NULL, 'PATIENT', 0, 1); INSERT INTO HFJ_BLK_EXPORT_COLFILE (PID, RES_ID, COLLECTION_PID) VALUES (13, '2018', 131); @@ -98,8 +98,8 @@ INSERT INTO NPM_PACKAGE_VER (PID, CURRENT_VERSION, PKG_DESC, DESC_UPPER, FHIR_VE INSERT INTO NPM_PACKAGE_VER_RES (PID, CANONICAL_URL, CANONICAL_VERSION, FILE_DIR, FHIR_VERSION, FHIR_VERSION_ID, FILE_NAME, RES_SIZE_BYTES, RES_TYPE, UPDATED_TIME, PACKVER_PID, BINARY_RES_ID) VALUES (1, NULL, NULL, 'PACKAGE', 'R4', '4.0.1', 'TESTPATIENT.JSON', 225, 'PATIENT', '2023-05-01 15:22:38.057', 1, 2); INSERT INTO TRM_CODESYSTEM (PID, CODE_SYSTEM_URI, CURRENT_VERSION_PID, CS_NAME, RES_ID) VALUES (1, 'HTTP://LOINC.ORG', 54, 'LOINC', 1780); INSERT INTO TRM_CODESYSTEM_VER (PID, CS_DISPLAY, CODESYSTEM_PID, CS_VERSION_ID, RES_ID) VALUES (54, 'LOINC', 1, NULL, 1780); -INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, 
CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (150, 'LL1001-8', 54, 'V2.67 PHENX05_14_30D FREQ AMTS', 1, '1415721', NULL, '2023-05-01 17:02:39.139'); -INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (151, 'LA13892-7', 54, 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 1, '1415722', 3, '2023-05-01 17:02:39.14'); +INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (150, 'LL1001-8', 54, 'V2.67 PHENX05_14_30D FREQ AMTS', 1, lo_create(1415721), NULL, '2023-05-01 17:02:39.139'); +INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (151, 'LA13892-7', 54, 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 1, lo_create(1415722), 3, '2023-05-01 17:02:39.14'); INSERT INTO TRM_CONCEPT_DESIG (PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, CS_VER_PID, CONCEPT_PID) VALUES (105, 'NL', '900000000000013009', 'SYNONYM', 'HTTP://SNOMED.INFO/SCT', 'SYSTOLISCHE BLOEDDRUK - EXPIRATIE', 54, 150); INSERT INTO TRM_CONCEPT_MAP (PID, RES_ID, SOURCE_URL, TARGET_URL, URL) VALUES (54, 1796, NULL, NULL, 'HTTP://LOINC.ORG/CM/LOINC-PARTS-TO-PUBCHEM'); INSERT INTO TRM_CONCEPT_MAP_GROUP (PID, CONCEPT_MAP_URL, SOURCE_URL, SOURCE_VS, SOURCE_VERSION, TARGET_URL, TARGET_VS, TARGET_VERSION, CONCEPT_MAP_PID) VALUES (54, NULL, 'HTTP://LOINC.ORG', NULL, NULL, 'HTTP://PUBCHEM.NCBI.NLM.NIH.GOV', NULL, NULL, 54); @@ -107,6 +107,7 @@ INSERT INTO TRM_CONCEPT_MAP_GRP_ELEMENT (PID, SOURCE_CODE, CONCEPT_MAP_URL, SOUR INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (60, '1054', NULL, 'PYRIDOXINE', 'EQUAL', NULL, NULL, NULL, 60); INSERT INTO TRM_CONCEPT_PC_LINK (PID, CHILD_PID, CODESYSTEM_PID, PARENT_PID, REL_TYPE) VALUES (54, 150, 54, 151, 0); INSERT INTO 
TRM_CONCEPT_PROPERTY (PID, PROP_CODESYSTEM, PROP_DISPLAY, PROP_KEY, PROP_TYPE, PROP_VAL, PROP_VAL_LOB, CS_VER_PID, CONCEPT_PID) VALUES (152, NULL, NULL, 'CLASSTYPE', 0, '2', NULL, 54, 150); +INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_CODESYSTEM, PROP_DISPLAY, PROP_KEY, PROP_TYPE, PROP_VAL, PROP_VAL_LOB, CS_VER_PID, CONCEPT_PID) VALUES (153, NULL, NULL, 'CLASSTYPE', 0, NULL, lo_create(1415723), 54, 150); INSERT INTO TRM_VALUESET (PID, EXPANSION_STATUS, VSNAME, RES_ID, TOTAL_CONCEPT_DESIGNATIONS, TOTAL_CONCEPTS, URL) VALUES (59, 'EXPANDED', 'LOINC UNIVERSAL ORDER SET', 1788, 0, 0, 'HTTP://LOINC.ORG/VS/LOINC-UNIVERSAL-ORDER-SET'); INSERT INTO TRM_VALUESET_CONCEPT (PID, CODEVAL, DISPLAY, VALUESET_ORDER, SYSTEM_URL, VALUESET_PID) VALUES (176, 'LA13892-7', 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 0, 'HTTP://LOINC.ORG', 59); INSERT INTO TRM_VALUESET_C_DESIGNATION (PID, VALUESET_CONCEPT_PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, VALUESET_PID) VALUES (4, 176, NULL, NULL, NULL, NULL, 'NM THYROID STUDY REPORT', 59); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql index 7b976a4c826..eb1ebd370d7 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql +++ b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V5_4_0/data/POSTGRES_9_4.sql @@ -54,7 +54,7 @@ INSERT INTO TRM_VALUESET_CONCEPT ( 'HTTP://LOINC.ORG', 59, 1, - 10820244, + lo_create(10820244), 4824 ); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_2_0/data/POSTGRES_9_4.sql b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_2_0/data/POSTGRES_9_4.sql index e84689abc35..e52db927eca 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_2_0/data/POSTGRES_9_4.sql +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/resources/migration/releases/V7_2_0/data/POSTGRES_9_4.sql @@ -1 +1,5 @@ INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (61, NULL, NULL, 'PYRIDOXINE', 'UNMATCHED', NULL, NULL, NULL, 60); +INSERT INTO HFJ_BINARY_STORAGE (CONTENT_ID, RESOURCE_ID, CONTENT_TYPE, STORAGE_CONTENT_BIN, PUBLISHED_DATE ) VALUES ('1', '2', 'TEXT', '\x48656c6c6f20776f726c6421', '2023-06-15 09:58:42.92'); +INSERT INTO TRM_CONCEPT (PID, CODEVAL, PARENT_PIDS_VC ) VALUES (1, 'aCode', '1 2 3 4'); +INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_KEY, PROP_VAL_BIN, PROP_TYPE) VALUES (1, 'key', '\x48656c6c6f20776f726c6421', 1); +INSERT INTO TRM_VALUESET_CONCEPT (PID, VALUESET_PID, VALUESET_ORDER, SOURCE_DIRECT_PARENT_PIDS_VC, SYSTEM_URL, CODEVAL) VALUES (1, 59, 1, '1 2 3 4 5 6', 'http://systemUlr', 'codeVal'); diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/binstore/BaseBinaryStorageSvcImplTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/binstore/BaseBinaryStorageSvcImplTest.java index b3e60c333d0..18214a6121c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/binstore/BaseBinaryStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/binstore/BaseBinaryStorageSvcImplTest.java @@ -19,7 +19,7 @@ public class BaseBinaryStorageSvcImplTest { svc.setFhirContextForTests(FhirContext.forR4Cached()); svc.setInterceptorBroadcasterForTests(new InterceptorService()); - String id = svc.newBlobId(); + String id = svc.newBinaryContentId(); ourLog.info(id); assertThat(id, matchesPattern("^[a-zA-Z0-9]{100}$")); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java 
b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java index b73cde3172e..f1700e1e5b8 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java @@ -75,34 +75,12 @@ public class HapiSchemaMigrationTest { HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, theDriverType, HAPI_FHIR_MIGRATION_TABLENAME); HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao); - VersionEnum[] allVersions = VersionEnum.values(); + for (VersionEnum aVersion : VersionEnum.values()) { + ourLog.info("Applying migrations for {}", aVersion); + migrate(theDriverType, dataSource, hapiMigrationStorageSvc, aVersion); - List dataVersions = List.of( - VersionEnum.V5_2_0, - VersionEnum.V5_3_0, - VersionEnum.V5_4_0, - VersionEnum.V5_5_0, - VersionEnum.V5_7_0, - VersionEnum.V6_0_0, - VersionEnum.V6_1_0, - VersionEnum.V6_2_0, - VersionEnum.V6_3_0, - VersionEnum.V6_6_0, - VersionEnum.V6_8_0, - VersionEnum.V7_0_0, - VersionEnum.V7_2_0 - ); - - int fromVersion = 0; - VersionEnum from = allVersions[fromVersion]; - VersionEnum toVersion; - - for (int i = 0; i < allVersions.length; i++) { - toVersion = allVersions[i]; - ourLog.info("Applying migrations for {}", toVersion); - migrate(theDriverType, dataSource, hapiMigrationStorageSvc, toVersion); - if (dataVersions.contains(toVersion)) { - myEmbeddedServersExtension.insertPersistenceTestData(theDriverType, toVersion); + if (aVersion.isNewerThan(FIRST_TESTED_VERSION)) { + myEmbeddedServersExtension.maybeInsertPersistenceTestData(theDriverType, aVersion); } } @@ -119,14 +97,6 @@ public class HapiSchemaMigrationTest { verifyForcedIdMigration(dataSource); } - private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, 
VersionEnum from, VersionEnum to) throws SQLException { - MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getTaskList(from, to); - SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc); - schemaMigrator.setDriverType(theDriverType); - schemaMigrator.createMigrationTableIfRequired(); - schemaMigrator.migrate(); - } - private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException { MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to}); SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc); @@ -169,5 +139,4 @@ public class HapiSchemaMigrationTest { assertFalse(schemaMigrator.createMigrationTableIfRequired()); } - } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTask.java new file mode 100644 index 00000000000..b6ebe11fc31 --- /dev/null +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTask.java @@ -0,0 +1,85 @@ +/*- + * #%L + * HAPI FHIR Server - SQL Migration + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.i18n.Msg; + +import java.sql.SQLException; + +public class MigrateColumBlobTypeToBinaryTypeTask extends BaseTableColumnTask { + + private final String myFromColumName; + private final String myToColumName; + + public MigrateColumBlobTypeToBinaryTypeTask( + String theProductVersion, + String theSchemaVersion, + String theTableName, + String theFromColumName, + String theToColumName) { + super(theProductVersion, theSchemaVersion); + myFromColumName = theFromColumName; + myToColumName = theToColumName; + + setTableName(theTableName); + } + + @Override + public void validate() { + setDescription("Migrating BLob (oid) from colum " + myFromColumName + " to BINARY on colum " + myToColumName + + " for table " + getTableName() + " (only affects Postgresql)"); + } + + @Override + protected void doExecute() throws SQLException { + String sql = buildSqlStatement(); + + executeSql(getTableName(), sql); + } + + String buildSqlStatement() { + String tableName = getTableName().toLowerCase(); + String fromColumName = myFromColumName.toLowerCase(); + String toColumName = myToColumName.toLowerCase(); + + String retVal; + + switch (getDriverType()) { + case MYSQL_5_7: + case DERBY_EMBEDDED: + case ORACLE_12C: + case MARIADB_10_1: + case COCKROACHDB_21_1: + case H2_EMBEDDED: + case MSSQL_2012: + retVal = "update " + tableName + " set " + toColumName + " = " + fromColumName + " where " + + fromColumName + " is not null"; + break; + case POSTGRES_9_4: + retVal = "update " + tableName + " set " + 
toColumName + " = lo_get(" + fromColumName + ") where " + + fromColumName + " is not null"; + break; + default: + throw new IllegalStateException(Msg.code(2514) + "Driver is not supported or null."); + } + + return retVal; + } +} diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTask.java new file mode 100644 index 00000000000..760fac382cb --- /dev/null +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTask.java @@ -0,0 +1,84 @@ +/*- + * #%L + * HAPI FHIR Server - SQL Migration + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.i18n.Msg; + +import java.sql.SQLException; + +public class MigrateColumnClobTypeToTextTypeTask extends BaseTableColumnTask { + + private final String myFromColumName; + private final String myToColumName; + + public MigrateColumnClobTypeToTextTypeTask( + String theProductVersion, + String theSchemaVersion, + String theTableName, + String theFromColumName, + String theToColumName) { + super(theProductVersion, theSchemaVersion); + myFromColumName = theFromColumName; + myToColumName = theToColumName; + + setTableName(theTableName); + } + + @Override + public void validate() { + setDescription("Migrating CLob (oid) from colum " + myFromColumName + " to " + myToColumName + + ".TEXT for table " + getTableName() + " (only affects Postgresql)"); + } + + @Override + protected void doExecute() throws SQLException { + String sql = buildSqlStatement(); + executeSql(getTableName(), sql); + } + + String buildSqlStatement() { + String tableName = getTableName().toLowerCase(); + String fromColumName = myFromColumName.toLowerCase(); + String toColumName = myToColumName.toLowerCase(); + + String retVal; + + switch (getDriverType()) { + case MYSQL_5_7: + case DERBY_EMBEDDED: + case ORACLE_12C: + case MARIADB_10_1: + case COCKROACHDB_21_1: + case H2_EMBEDDED: + case MSSQL_2012: + retVal = "update " + tableName + " set " + toColumName + " = " + fromColumName + " where " + + fromColumName + " is not null"; + break; + case POSTGRES_9_4: + retVal = "update " + tableName + " set " + toColumName + " = convert_from(lo_get(" + fromColumName + + "), 'UTF8') where " + fromColumName + " is not null"; + break; + default: + throw new IllegalStateException(Msg.code(2515)); + } + + return retVal; + } +} diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java new file mode 100644 index 
00000000000..49615e73c1a --- /dev/null +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java @@ -0,0 +1,151 @@ +/*- + * #%L + * HAPI FHIR Server - SQL Migration + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.intellij.lang.annotations.Language; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.jdbc.core.ColumnMapRowMapper; +import org.springframework.jdbc.core.JdbcTemplate; + +import java.sql.SQLException; +import java.util.Set; + +public class RenameTableTask extends BaseTableTask { + + private static final Logger ourLog = LoggerFactory.getLogger(RenameTableTask.class); + + private final String myOldTableName; + private final String myNewTableName; + private boolean myDeleteTargetColumnFirstIfExist = true; + + public RenameTableTask( + String theProductVersion, String theSchemaVersion, String theOldTableName, String theNewTableName) { + super(theProductVersion, theSchemaVersion); + myOldTableName = theOldTableName; + myNewTableName = theNewTableName; + } + + @Override + public void validate() { + setDescription("Rename table " + getOldTableName()); + } + + private void handleTableWithNewTableName() throws SQLException { + + if 
(!myDeleteTargetColumnFirstIfExist) { + throw new SQLException(Msg.code(2517) + "Can not rename " + getOldTableName() + " to " + getNewTableName() + + " because a table with name " + getNewTableName() + " already exists"); + } + + // a table with the new tableName already exists and we can delete it. we will only do so if it is empty. + Integer rowsWithData = getConnectionProperties().getTxTemplate().execute(t -> { + String sql = "SELECT * FROM " + getNewTableName(); + JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate(); + jdbcTemplate.setMaxRows(1); + return jdbcTemplate.query(sql, new ColumnMapRowMapper()).size(); + }); + + if (rowsWithData != null && rowsWithData > 0) { + throw new SQLException(Msg.code(2518) + "Can not rename " + getOldTableName() + " to " + getNewTableName() + + " because a table with name " + getNewTableName() + " already exists and is populated."); + } + + logInfo( + ourLog, + "Table {} already exists - Going to drop it before renaming table {} to {}", + getNewTableName(), + getOldTableName(), + getNewTableName()); + + @Language("SQL") + String sql = "DROP TABLE " + getNewTableName(); + executeSql(getNewTableName(), sql); + } + + @Override + public void doExecute() throws SQLException { + + Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); + boolean hasTableWithNewTableName = tableNames.contains(getNewTableName()); + + if (!tableNames.contains(getOldTableName())) { + throw new SQLException(Msg.code(2516) + "Can not rename " + getOldTableName() + " to " + getNewTableName() + + " because the original table does not exists"); + } + + if (hasTableWithNewTableName) { + handleTableWithNewTableName(); + } + + String sql = buildRenameTableSqlStatement(); + logInfo(ourLog, "Renaming table: {}", getOldTableName()); + + executeSql(getOldTableName(), sql); + } + + public void setDeleteTargetColumnFirstIfExist(boolean theDeleteTargetColumnFirstIfExist) { + myDeleteTargetColumnFirstIfExist = 
theDeleteTargetColumnFirstIfExist; + } + + public String getNewTableName() { + return myNewTableName; + } + + public String getOldTableName() { + return myOldTableName; + } + + String buildRenameTableSqlStatement() { + String retVal; + + final String oldTableName = getOldTableName(); + final String newTableName = getNewTableName(); + + switch (getDriverType()) { + case MYSQL_5_7: + case DERBY_EMBEDDED: + retVal = "rename table " + oldTableName + " to " + newTableName; + break; + case ORACLE_12C: + case MARIADB_10_1: + case POSTGRES_9_4: + case COCKROACHDB_21_1: + case H2_EMBEDDED: + retVal = "alter table " + oldTableName + " rename to " + newTableName; + break; + case MSSQL_2012: + retVal = "sp_rename '" + oldTableName + "', '" + newTableName + "'"; + break; + default: + throw new IllegalStateException(Msg.code(2513)); + } + return retVal; + } + + protected void generateHashCode(HashCodeBuilder theBuilder) { + super.generateHashCode(theBuilder); + theBuilder.append(myOldTableName); + theBuilder.append(myNewTableName); + } +} diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java index 49d93342d0b..ad702ad3d6a 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java @@ -31,6 +31,8 @@ import org.flywaydb.core.api.MigrationVersion; import java.util.Collection; +import static java.util.Objects.nonNull; + public class BaseMigrationTasks { MigrationVersion lastVersion; private Multimap myTasks = @@ -71,7 +73,7 @@ public class BaseMigrationTasks { return theRelease.name(); } - public MigrationTaskList getAllTasks(T[] theVersionEnumValues) { + public MigrationTaskList getAllTasks(T... 
theVersionEnumValues) { MigrationTaskList retval = new MigrationTaskList(); for (T nextVersion : theVersionEnumValues) { Collection nextValues = myTasks.get(nextVersion); @@ -84,6 +86,11 @@ public class BaseMigrationTasks { return retval; } + public boolean hasTasksForVersion(T theRelease) { + Collection baseTasks = myTasks.get(theRelease); + return nonNull(baseTasks) && !baseTasks.isEmpty(); + } + protected BaseTask getTaskWithVersion(String theMigrationVersion) { // First normalize the version number String expectedVersion = diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java index 387cea1aeb7..ffc800f9a9f 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java @@ -39,11 +39,14 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask; import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask; import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition; import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask; +import ca.uhn.fhir.jpa.migrate.taskdef.MigrateColumBlobTypeToBinaryTypeTask; +import ca.uhn.fhir.jpa.migrate.taskdef.MigrateColumnClobTypeToTextTypeTask; import ca.uhn.fhir.jpa.migrate.taskdef.MigratePostgresTextClobToBinaryClobTask; import ca.uhn.fhir.jpa.migrate.taskdef.ModifyColumnTask; import ca.uhn.fhir.jpa.migrate.taskdef.NopTask; import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask; import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask; +import ca.uhn.fhir.jpa.migrate.taskdef.RenameTableTask; import org.apache.commons.lang3.Validate; import org.intellij.lang.annotations.Language; import org.slf4j.Logger; @@ -320,6 +323,11 @@ public class Builder { addTask(task); } + public void renameTable(String theVersion, String theNewTableName) { + RenameTableTask task = new RenameTableTask(myRelease, 
theVersion, getTableName(), theNewTableName); + addTask(task); + } + public void migratePostgresTextClobToBinaryClob(String theVersion, String theColumnName) { MigratePostgresTextClobToBinaryClobTask task = new MigratePostgresTextClobToBinaryClobTask(myRelease, theVersion); @@ -328,6 +336,20 @@ public class Builder { addTask(task); } + public void migrateBlobToBinary(String theVersion, String theFromColumName, String theToColumName) { + MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask( + myRelease, theVersion, getTableName(), theFromColumName, theToColumName); + + addTask(task); + } + + public void migrateClobToText(String theVersion, String theFromColumName, String theToColumName) { + MigrateColumnClobTypeToTextTypeTask task = new MigrateColumnClobTypeToTextTypeTask( + myRelease, theVersion, getTableName(), theFromColumName, theToColumName); + + addTask(task); + } + public class BuilderAddIndexWithName { private final String myVersion; private final String myIndexName; diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java index b729e82bafb..2bc06ae4775 100644 --- a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java @@ -136,6 +136,7 @@ public abstract class BaseTest { if (getConnectionProperties() != null) { Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); if (tableNames.contains(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME)) { + ourLog.info("Deleting entries in " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME); executeSql("DELETE from " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME + " where \"installed_rank\" > 0"); } } @@ -162,6 +163,7 @@ public abstract class BaseTest { public void after() { if (myConnectionProperties != null) { myConnectionProperties.close(); + 
ourLog.info("connectionProperties was closed"); } } diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest.java new file mode 100644 index 00000000000..28b99c7fba9 --- /dev/null +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest.java @@ -0,0 +1,42 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.*; + +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest { + + private String createMigrationSqlForDriverType(DriverTypeEnum theDriverTypeEnum) { + MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask("1", "1", "SOMETABLE", "BLOB_COLUM_NAME", "BIN_COLUM_NAME"); + task.setDriverType(theDriverTypeEnum); + + return task.buildSqlStatement(); + } + + static Stream paramArguments(){ + return Stream.of( + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MYSQL_5_7), + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", DERBY_EMBEDDED), + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", ORACLE_12C), + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MARIADB_10_1), + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", COCKROACHDB_21_1), + 
Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", H2_EMBEDDED), + Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MSSQL_2012), + + Arguments.of("update sometable set bin_colum_name = lo_get(blob_colum_name) where blob_colum_name is not null", POSTGRES_9_4) + ); + } + @ParameterizedTest + @MethodSource("paramArguments") + public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) { + assertEquals(theExpectedSqlString, createMigrationSqlForDriverType(theDriverTypeEnum)); + } + +} diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskDbSpecificTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskDbSpecificTest.java new file mode 100644 index 00000000000..69c0081b53e --- /dev/null +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/MigrateColumnClobTypeToTextTypeTaskDbSpecificTest.java @@ -0,0 +1,47 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.COCKROACHDB_21_1; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.DERBY_EMBEDDED; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.H2_EMBEDDED; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MARIADB_10_1; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MSSQL_2012; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MYSQL_5_7; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.ORACLE_12C; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class 
MigrateColumnClobTypeToTextTypeTaskDbSpecificTest { + + private String createMigrationSqlForDriverType(DriverTypeEnum theDriverTypeEnum) { + MigrateColumnClobTypeToTextTypeTask task = new MigrateColumnClobTypeToTextTypeTask("1", "1", "sometable", "clob_colum_name", "text_colum_name"); + task.setDriverType(theDriverTypeEnum); + + return task.buildSqlStatement(); + } + + static Stream paramArguments(){ + return Stream.of( + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MYSQL_5_7), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", DERBY_EMBEDDED), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", ORACLE_12C), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MARIADB_10_1), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", COCKROACHDB_21_1), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", H2_EMBEDDED), + Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MSSQL_2012), + + Arguments.of("update sometable set text_colum_name = convert_from(lo_get(clob_colum_name), 'UTF8') where clob_colum_name is not null", DriverTypeEnum.POSTGRES_9_4) + ); + } + @ParameterizedTest + @MethodSource("paramArguments") + public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) { + assertEquals(theExpectedSqlString, createMigrationSqlForDriverType(theDriverTypeEnum)); + } + +} diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskDbSpecificTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskDbSpecificTest.java new file mode 100644 index 00000000000..038b764e30d --- 
/dev/null +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskDbSpecificTest.java @@ -0,0 +1,45 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.DERBY_EMBEDDED; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.H2_EMBEDDED; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MARIADB_10_1; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MSSQL_2012; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MYSQL_5_7; +import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.ORACLE_12C; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class RenameTableTaskDbSpecificTest { + + private String createRenameTableSql(DriverTypeEnum theDriverTypeEnum) { + RenameTableTask task = new RenameTableTask("1", "1", "oldname","newname"); + task.setDriverType(theDriverTypeEnum); + + return task.buildRenameTableSqlStatement(); + } + + static Stream paramArguments(){ + return Stream.of( + Arguments.of("rename table oldname to newname", MYSQL_5_7), + Arguments.of("rename table oldname to newname", DERBY_EMBEDDED), + Arguments.of("alter table oldname rename to newname", ORACLE_12C), + Arguments.of("alter table oldname rename to newname", MARIADB_10_1), + Arguments.of("alter table oldname rename to newname", DriverTypeEnum.POSTGRES_9_4), + Arguments.of("alter table oldname rename to newname", H2_EMBEDDED), + Arguments.of("sp_rename 'oldname', 'newname'", MSSQL_2012) + + ); + } + @ParameterizedTest + @MethodSource("paramArguments") + public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) { + assertEquals(theExpectedSqlString, createRenameTableSql(theDriverTypeEnum)); + } + +} diff --git 
a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java new file mode 100644 index 00000000000..a1c6aad40ba --- /dev/null +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java @@ -0,0 +1,66 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.sql.SQLException; +import java.util.Set; +import java.util.function.Supplier; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; +import static org.junit.jupiter.api.Assertions.fail; + +public class RenameTableTaskTest extends BaseTest { + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("data") + public void testRenameTableTask_whenTableExists_willRenameTheTable(Supplier theTestDatabaseDetails) throws SQLException { + // given + before(theTestDatabaseDetails); + final String newTableName = "NEWTABLE"; + final String oldTableName = "SOMETABLE"; + + executeSql("create table " + oldTableName + " (PID bigint not null, TEXTCOL varchar(255))"); + + RenameTableTask task = new RenameTableTask("1", "1", oldTableName, newTableName); + task.setTableName(oldTableName); + getMigrator().addTask(task); + + // when + getMigrator().migrate(); + + // then + Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); + assertThat(tableNames, hasItem(newTableName)); + assertThat(tableNames, not(hasItem(oldTableName))); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource("data") + public void 
testRenameTableTask_whenTableDoesNotExists_willRaiseException(Supplier theTestDatabaseDetails) throws SQLException { + // given + before(theTestDatabaseDetails); + final String newTableName = "NEWTABLE"; + final String oldTableName = "SOMETABLE"; + + RenameTableTask task = new RenameTableTask("1", "1", oldTableName, newTableName); + getMigrator().addTask(task); + + // when + try { + getMigrator().migrate(); + fail(); + } catch (Exception e){ + // then + assertThat(e.getMessage(), containsString("2516")); + } + + } + +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java index 771b934d71b..14480252f5e 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java @@ -77,7 +77,8 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator // validate the exportId if (!StringUtils.isBlank(theParameters.getExportIdentifier())) { - if (myBinaryStorageSvc != null && !myBinaryStorageSvc.isValidBlobId(theParameters.getExportIdentifier())) { + if (myBinaryStorageSvc != null + && !myBinaryStorageSvc.isValidBinaryContentId(theParameters.getExportIdentifier())) { errorMsgs.add("Export ID does not conform to the current blob storage implementation's limitations."); } } diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java index cc8b8dc2758..040cca120d9 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java +++ 
b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java @@ -67,7 +67,7 @@ public class BulkExportJobParametersValidatorTest { // when when(myDaoRegistry.isResourceTypeSupported(anyString())) .thenReturn(true); - when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(false); + when(myIBinaryStorageSvc.isValidBinaryContentId(any())).thenReturn(false); List errors = myValidator.validate(null, parameters); // verify @@ -84,7 +84,7 @@ public class BulkExportJobParametersValidatorTest { when(myDaoRegistry.isResourceTypeSupported(anyString())) .thenReturn(true); - when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(true); + when(myIBinaryStorageSvc.isValidBinaryContentId(any())).thenReturn(true); List errors = myValidator.validate(null, parameters); // verify diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java index 027121b785d..8e7d9425508 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java @@ -43,7 +43,7 @@ public interface IBinaryStorageSvc { * @param theNewBlobId the blob ID to validate * @return true if the blob ID is valid, false otherwise. 
*/ - default boolean isValidBlobId(String theNewBlobId) { + default boolean isValidBinaryContentId(String theNewBlobId) { return true; // default method here as we don't want to break existing implementations } @@ -77,12 +77,12 @@ public interface IBinaryStorageSvc { * @param theContentType What is the content type * @return true if the storage service should store the item */ - boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType); + boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType); /** - * Generate a new blob ID that will be passed to {@link #storeBlob(IIdType, String, String, InputStream)} later + * Generate a new binaryContent ID that will be passed to {@link #storeBinaryContent(IIdType, String, String, InputStream)} later */ - String newBlobId(); + String newBinaryContentId(); /** * Store a new binary blob @@ -92,16 +92,17 @@ public interface IBinaryStorageSvc { * @param theContentType The content type to associate with this blob * @param theInputStream An InputStream to read from. This method should close the stream when it has been fully consumed. * @return Returns details about the stored data - * @deprecated Use {@link #storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, + * @deprecated Use {@link #storeBinaryContent(IIdType theResourceId, String theBlobIdOrNull, String theContentType, * InputStream theInputStream, RequestDetails theRequestDetails)} instead. 
This method * will be removed because it doesn't receive the 'theRequestDetails' parameter it needs to forward to the pointcut) */ @Deprecated(since = "6.6.0", forRemoval = true) @Nonnull - default StoredDetails storeBlob( + default StoredDetails storeBinaryContent( IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) throws IOException { - return storeBlob(theResourceId, theBlobIdOrNull, theContentType, theInputStream, new ServletRequestDetails()); + return storeBinaryContent( + theResourceId, theBlobIdOrNull, theContentType, theInputStream, new ServletRequestDetails()); } /** @@ -115,7 +116,7 @@ public interface IBinaryStorageSvc { * @return Returns details about the stored data */ @Nonnull - StoredDetails storeBlob( + StoredDetails storeBinaryContent( IIdType theResourceId, String theBlobIdOrNull, String theContentType, @@ -123,14 +124,15 @@ public interface IBinaryStorageSvc { RequestDetails theRequestDetails) throws IOException; - StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) throws IOException; + StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) throws IOException; /** * @return Returns true if the blob was found and written, of false if the blob was not found (i.e. 
it was expunged or the ID was invalid) */ - boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException; + boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) + throws IOException; - void expungeBlob(IIdType theResourceId, String theBlobId); + void expungeBinaryContent(IIdType theResourceId, String theBlobId); /** * Fetch the contents of the given blob @@ -139,7 +141,7 @@ public interface IBinaryStorageSvc { * @param theBlobId The blob ID * @return The payload as a byte array */ - byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException; + byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) throws IOException; /** * Fetch the byte[] contents of a given Binary resource's `data` element. If the data is a standard base64encoded string that is embedded, return it. @@ -148,5 +150,5 @@ public interface IBinaryStorageSvc { * @param theResource The Binary resource you want to extract data bytes from * @return The binary data blob as a byte array */ - byte[] fetchDataBlobFromBinary(IBaseBinary theResource) throws IOException; + byte[] fetchDataByteArrayFromBinary(IBaseBinary theResource) throws IOException; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java index 332e4a74f15..14b4b583a2e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java @@ -33,8 +33,8 @@ import java.util.Date; public class StoredDetails implements IModelJson { - @JsonProperty("blobId") - private String myBlobId; + @JsonProperty("binaryContentId") + private String myBinaryContentId; @JsonProperty("bytes") private long myBytes; @@ -62,12 +62,12 @@ public class StoredDetails implements IModelJson { * Constructor */ public StoredDetails( - 
@Nonnull String theBlobId, + @Nonnull String theBinaryContentId, long theBytes, @Nonnull String theContentType, HashingInputStream theIs, Date thePublished) { - myBlobId = theBlobId; + myBinaryContentId = theBinaryContentId; myBytes = theBytes; myContentType = theContentType; myHash = theIs.hash().toString(); @@ -77,7 +77,7 @@ public class StoredDetails implements IModelJson { @Override public String toString() { return new ToStringBuilder(this) - .append("blobId", myBlobId) + .append("binaryContentId", myBinaryContentId) .append("bytes", myBytes) .append("contentType", myContentType) .append("hash", myHash) @@ -114,12 +114,12 @@ public class StoredDetails implements IModelJson { } @Nonnull - public String getBlobId() { - return myBlobId; + public String getBinaryContentId() { + return myBinaryContentId; } - public StoredDetails setBlobId(String theBlobId) { - myBlobId = theBlobId; + public StoredDetails setBinaryContentId(String theBinaryContentId) { + myBinaryContentId = theBinaryContentId; return this; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java index 8d193daf572..9d4ec9c7cf5 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java @@ -56,7 +56,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import java.awt.*; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -129,7 +128,7 @@ public class BinaryStorageInterceptor> { .collect(Collectors.toList()); for (String next : attachmentIds) { - myBinaryStorageSvc.expungeBlob(theResource.getIdElement(), next); + myBinaryStorageSvc.expungeBinaryContent(theResource.getIdElement(), next); 
theCounter.incrementAndGet(); ourLog.info( @@ -232,38 +231,39 @@ public class BinaryStorageInterceptor> { long nextPayloadLength = data.length; String nextContentType = nextTarget.getContentType(); boolean shouldStoreBlob = - myBinaryStorageSvc.shouldStoreBlob(nextPayloadLength, resourceId, nextContentType); + myBinaryStorageSvc.shouldStoreBinaryContent(nextPayloadLength, resourceId, nextContentType); if (shouldStoreBlob) { - String newBlobId; + String newBinaryContentId; if (thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) { ByteArrayInputStream inputStream = new ByteArrayInputStream(data); - StoredDetails storedDetails = myBinaryStorageSvc.storeBlob( + StoredDetails storedDetails = myBinaryStorageSvc.storeBinaryContent( resourceId, null, nextContentType, inputStream, theRequestDetails); - newBlobId = storedDetails.getBlobId(); + newBinaryContentId = storedDetails.getBinaryContentId(); } else { assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name(); - newBlobId = myBinaryStorageSvc.newBlobId(); + newBinaryContentId = myBinaryStorageSvc.newBinaryContentId(); - String prefix = invokeAssignBlobPrefix(theRequestDetails, theResource); + String prefix = invokeAssignBinaryContentPrefix(theRequestDetails, theResource); if (isNotBlank(prefix)) { - newBlobId = prefix + newBlobId; + newBinaryContentId = prefix + newBinaryContentId; } - if (myBinaryStorageSvc.isValidBlobId(newBlobId)) { + if (myBinaryStorageSvc.isValidBinaryContentId(newBinaryContentId)) { List deferredBinaryTargets = getOrCreateDeferredBinaryStorageList(theResource); DeferredBinaryTarget newDeferredBinaryTarget = - new DeferredBinaryTarget(newBlobId, nextTarget, data); + new DeferredBinaryTarget(newBinaryContentId, nextTarget, data); deferredBinaryTargets.add(newDeferredBinaryTarget); newDeferredBinaryTarget.setBlobIdPrefixHookApplied(true); } else { throw new InternalErrorException(Msg.code(2341) - + "Invalid blob ID for backing storage service.[blobId=" + 
newBlobId + ",service=" + + "Invalid binaryContent ID for backing storage service.[binaryContentId=" + + newBinaryContentId + ",service=" + myBinaryStorageSvc.getClass().getName() + "]"); } } - myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBlobId); + myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBinaryContentId); } } } @@ -273,19 +273,32 @@ public class BinaryStorageInterceptor> { * This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX} hook and returns the prefix to use for the blob ID, or null if there are no implementers. * @return A string, which will be used to prefix the blob ID. May be null. */ - private String invokeAssignBlobPrefix(RequestDetails theRequest, IBaseResource theResource) { - if (!CompositeInterceptorBroadcaster.hasHooks( - Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) { + private String invokeAssignBinaryContentPrefix(RequestDetails theRequest, IBaseResource theResource) { + // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period + boolean hasStorageBinaryAssignBlobIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest); + + boolean hasStorageBinaryAssignBinaryContentIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX, myInterceptorBroadcaster, theRequest); + + if (!(hasStorageBinaryAssignBlobIdPrefixHooks || hasStorageBinaryAssignBinaryContentIdPrefixHooks)) { return null; } HookParams params = new HookParams().add(RequestDetails.class, theRequest).add(IBaseResource.class, theResource); - BaseBinaryStorageSvcImpl.setBlobIdPrefixApplied(theRequest); + BaseBinaryStorageSvcImpl.setBinaryContentIdPrefixApplied(theRequest); + + Pointcut pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX; + + // TODO: to be removed when pointcut 
STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period + if (hasStorageBinaryAssignBlobIdPrefixHooks) { + pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX; + } return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( - myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params); + myInterceptorBroadcaster, theRequest, pointcutToInvoke, params); } @Nonnull @@ -317,7 +330,7 @@ public class BinaryStorageInterceptor> { InputStream dataStream = next.getDataStream(); String contentType = target.getContentType(); RequestDetails requestDetails = initRequestDetails(next); - myBinaryStorageSvc.storeBlob(resourceId, blobId, contentType, dataStream, requestDetails); + myBinaryStorageSvc.storeBinaryContent(resourceId, blobId, contentType, dataStream, requestDetails); } } } @@ -325,7 +338,7 @@ public class BinaryStorageInterceptor> { private RequestDetails initRequestDetails(DeferredBinaryTarget theDeferredBinaryTarget) { ServletRequestDetails requestDetails = new ServletRequestDetails(); if (theDeferredBinaryTarget.isBlobIdPrefixHookApplied()) { - BaseBinaryStorageSvcImpl.setBlobIdPrefixApplied(requestDetails); + BaseBinaryStorageSvcImpl.setBinaryContentIdPrefixApplied(requestDetails); } return requestDetails; } @@ -374,14 +387,15 @@ public class BinaryStorageInterceptor> { Optional attachmentId = nextTarget.getAttachmentId(); if (attachmentId.isPresent()) { - StoredDetails blobDetails = myBinaryStorageSvc.fetchBlobDetails(resourceId, attachmentId.get()); + StoredDetails blobDetails = + myBinaryStorageSvc.fetchBinaryContentDetails(resourceId, attachmentId.get()); if (blobDetails == null) { String msg = myCtx.getLocalizer().getMessage(BinaryAccessProvider.class, "unknownBlobId"); throw new InvalidRequestException(Msg.code(1330) + msg); } if ((theCumulativeInflatedBytes + blobDetails.getBytes()) < myAutoInflateBinariesMaximumBytes) { - byte[] bytes = myBinaryStorageSvc.fetchBlob(resourceId, 
attachmentId.get()); + byte[] bytes = myBinaryStorageSvc.fetchBinaryContent(resourceId, attachmentId.get()); nextTarget.setData(bytes); theCumulativeInflatedBytes += blobDetails.getBytes(); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java index cd6a5a870d6..5337bd8eee9 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java @@ -118,7 +118,7 @@ public class BinaryAccessProvider { String blobId = attachmentId.get(); - StoredDetails blobDetails = myBinaryStorageSvc.fetchBlobDetails(theResourceId, blobId); + StoredDetails blobDetails = myBinaryStorageSvc.fetchBinaryContentDetails(theResourceId, blobId); if (blobDetails == null) { String msg = myCtx.getLocalizer().getMessage(BinaryAccessProvider.class, "unknownBlobId"); throw new InvalidRequestException(Msg.code(1331) + msg); @@ -138,7 +138,7 @@ public class BinaryAccessProvider { theServletResponse.addHeader( Constants.HEADER_LAST_MODIFIED, DateUtils.formatDate(blobDetails.getPublished())); - myBinaryStorageSvc.writeBlob(theResourceId, blobId, theServletResponse.getOutputStream()); + myBinaryStorageSvc.writeBinaryContent(theResourceId, blobId, theServletResponse.getOutputStream()); theServletResponse.getOutputStream().close(); } else { @@ -212,11 +212,11 @@ public class BinaryAccessProvider { Msg.code(2073) + "Input stream is empty! 
Ensure that you are uploading data, and if so, ensure that no interceptors are in use that may be consuming the input stream"); } - if (myBinaryStorageSvc.shouldStoreBlob(size, theResourceId, requestContentType)) { - StoredDetails storedDetails = myBinaryStorageSvc.storeBlob( + if (myBinaryStorageSvc.shouldStoreBinaryContent(size, theResourceId, requestContentType)) { + StoredDetails storedDetails = myBinaryStorageSvc.storeBinaryContent( theResourceId, null, requestContentType, new ByteArrayInputStream(bytes), theRequestDetails); size = storedDetails.getBytes(); - blobId = storedDetails.getBlobId(); + blobId = storedDetails.getBinaryContentId(); Validate.notBlank(blobId, "BinaryStorageSvc returned a null blob ID"); // should not happen Validate.isTrue(size == theServletRequest.getContentLength(), "Unexpected stored size"); // Sanity check } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java index f94964b3922..97efb62d066 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java @@ -57,7 +57,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { public static long DEFAULT_MAXIMUM_BINARY_SIZE = Long.MAX_VALUE - 1; - public static String BLOB_ID_PREFIX_APPLIED = "blob-id-prefix-applied"; + public static String BINARY_CONTENT_ID_PREFIX_APPLIED = "binary-content-id-prefix-applied"; private final int ID_LENGTH = 100; private long myMaximumBinarySize = DEFAULT_MAXIMUM_BINARY_SIZE; @@ -95,20 +95,20 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { } @Override - public String newBlobId() { + public String newBinaryContentId() { return 
RandomTextUtils.newSecureRandomAlphaNumericString(ID_LENGTH); } /** - * Default implementation is to return true for any Blob ID. + * Default implementation is to return true for any binary content ID. */ @Override - public boolean isValidBlobId(String theNewBlobId) { + public boolean isValidBinaryContentId(String theNewBinaryContentId) { return true; } @Override - public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) { + public boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType) { return theSize >= getMinimumBinarySize(); } @@ -139,63 +139,83 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { since = "6.6.0 - Maintained for interface backwards compatibility. Note that invokes interceptor pointcut with empty parameters", forRemoval = true) - protected String provideIdForNewBlob(String theBlobIdOrNull) { - return isNotBlank(theBlobIdOrNull) ? theBlobIdOrNull : newBlobId(); + protected String provideIdForNewBinaryContent(String theBinaryContentIdOrNull) { + return isNotBlank(theBinaryContentIdOrNull) ? theBinaryContentIdOrNull : newBinaryContentId(); } @Nonnull - protected String provideIdForNewBlob( - String theBlobIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { - String blobId = isNotBlank(theBlobIdOrNull) ? theBlobIdOrNull : newBlobId(); + protected String provideIdForNewBinaryContent( + String theBinaryContentIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { + String binaryContentId = isNotBlank(theBinaryContentIdOrNull) ? 
theBinaryContentIdOrNull : newBinaryContentId(); - // make sure another pointcut didn't already apply a prefix to the blobId - if (isBlobIdPrefixApplied(theRequestDetails)) { - return blobId; + // make sure another pointcut didn't already apply a prefix to the binaryContentId + if (isBinaryContentIdPrefixApplied(theRequestDetails)) { + return binaryContentId; } - String blobPrefixFromHooksOrNull = callBlobIdPointcut(theBytes, theRequestDetails, theContentType); - String blobIdPrefixFromHooks = blobPrefixFromHooksOrNull == null ? "" : blobPrefixFromHooksOrNull; - return blobIdPrefixFromHooks + blobId; + String binaryContentIdPrefixFromHooksOrNull = + callBinaryContentIdPointcut(theBytes, theRequestDetails, theContentType); + String binaryContentIdPrefixFromHooks = StringUtils.defaultString(binaryContentIdPrefixFromHooksOrNull); + return binaryContentIdPrefixFromHooks + binaryContentId; } - protected boolean isBlobIdPrefixApplied(RequestDetails theRequestDetails) { - return theRequestDetails.getUserData().get(BLOB_ID_PREFIX_APPLIED) == Boolean.TRUE; + protected boolean isBinaryContentIdPrefixApplied(RequestDetails theRequestDetails) { + return theRequestDetails.getUserData().get(BINARY_CONTENT_ID_PREFIX_APPLIED) == Boolean.TRUE; } - public static void setBlobIdPrefixApplied(RequestDetails theRequestDetails) { - theRequestDetails.getUserData().put(BLOB_ID_PREFIX_APPLIED, true); + public static void setBinaryContentIdPrefixApplied(RequestDetails theRequestDetails) { + theRequestDetails.getUserData().put(BINARY_CONTENT_ID_PREFIX_APPLIED, true); } /** - * Invokes STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX pointcut if present - * @return null if pointcut is not present + * This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX} hook and returns the prefix to use for the binary content ID, or null if there are no implementers. + * @return A string, which will be used to prefix the binary content ID. May be null. 
*/ @Nullable - private String callBlobIdPointcut(byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { - // Interceptor call: STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX + private String callBinaryContentIdPointcut( + byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { + // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period. + // Deprecated in 7.2.0. + boolean hasStorageBinaryAssignBlobIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequestDetails); + + boolean hasStorageBinaryAssignBinaryContentIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX, myInterceptorBroadcaster, theRequestDetails); + + if (!(hasStorageBinaryAssignBlobIdPrefixHooks || hasStorageBinaryAssignBinaryContentIdPrefixHooks)) { + return null; + } + IBaseBinary binary = BinaryUtil.newBinary(myFhirContext).setContent(theBytes).setContentType(theContentType); HookParams hookParams = new HookParams().add(RequestDetails.class, theRequestDetails).add(IBaseResource.class, binary); - setBlobIdPrefixApplied(theRequestDetails); + setBinaryContentIdPrefixApplied(theRequestDetails); + + Pointcut pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX; + + // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period + if (hasStorageBinaryAssignBlobIdPrefixHooks) { + pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX; + } return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( - myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, hookParams); + myInterceptorBroadcaster, theRequestDetails, pointcutToInvoke, hookParams); } @Override - public byte[] fetchDataBlobFromBinary(IBaseBinary theBaseBinary) throws IOException { + public byte[] 
fetchDataByteArrayFromBinary(IBaseBinary theBaseBinary) throws IOException { IPrimitiveType dataElement = BinaryUtil.getOrCreateData(myFhirContext, theBaseBinary); byte[] value = dataElement.getValue(); if (value == null) { Optional attachmentId = getAttachmentId((IBaseHasExtensions) dataElement); if (attachmentId.isPresent()) { - value = fetchBlob(theBaseBinary.getIdElement(), attachmentId.get()); + value = fetchBinaryContent(theBaseBinary.getIdElement(), attachmentId.get()); } else { throw new InternalErrorException( - Msg.code(1344) + "Unable to load binary blob data for " + theBaseBinary.getIdElement()); + Msg.code(1344) + "Unable to load binary content data for " + theBaseBinary.getIdElement()); } } return value; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java index c2683a79957..35fb8bca49b 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java @@ -38,7 +38,7 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { } @Override - public boolean isValidBlobId(String theNewBlobId) { + public boolean isValidBinaryContentId(String theNewBlobId) { return true; } @@ -58,18 +58,18 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { } @Override - public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) { + public boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType) { return false; } @Override - public String newBlobId() { + public String newBinaryContentId() { throw new UnsupportedOperationException(Msg.code(1345)); } @Nonnull @Override - public StoredDetails storeBlob( + public StoredDetails storeBinaryContent( IIdType theResourceId, String theBlobIdOrNull, String theContentType, 
@@ -79,27 +79,27 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { } @Override - public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) { + public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) { throw new UnsupportedOperationException(Msg.code(1347)); } @Override - public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) { + public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) { throw new UnsupportedOperationException(Msg.code(1348)); } @Override - public void expungeBlob(IIdType theIdElement, String theBlobId) { + public void expungeBinaryContent(IIdType theIdElement, String theBlobId) { throw new UnsupportedOperationException(Msg.code(1349)); } @Override - public byte[] fetchBlob(IIdType theResourceId, String theBlobId) { + public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) { throw new UnsupportedOperationException(Msg.code(1350)); } @Override - public byte[] fetchDataBlobFromBinary(IBaseBinary theResource) { + public byte[] fetchDataByteArrayFromBinary(IBaseBinary theResource) { throw new UnsupportedOperationException(Msg.code(1351)); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java index 07c2cd2ffaa..1fdb05a6cc0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java @@ -81,13 +81,13 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { * This implementation prevents: \ / | . 
*/ @Override - public boolean isValidBlobId(String theNewBlobId) { - return !StringUtils.containsAny(theNewBlobId, '\\', '/', '|', '.'); + public boolean isValidBinaryContentId(String theNewBinaryContentId) { + return !StringUtils.containsAny(theNewBinaryContentId, '\\', '/', '|', '.'); } @Nonnull @Override - public StoredDetails storeBlob( + public StoredDetails storeBinaryContent( IIdType theResourceId, String theBlobIdOrNull, String theContentType, @@ -95,7 +95,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { RequestDetails theRequestDetails) throws IOException { - String id = super.provideIdForNewBlob(theBlobIdOrNull, null, theRequestDetails, theContentType); + String id = super.provideIdForNewBinaryContent(theBlobIdOrNull, null, theRequestDetails, theContentType); File storagePath = getStoragePath(id, true); // Write binary file @@ -126,7 +126,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { } @Override - public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) throws IOException { + public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) throws IOException { StoredDetails retVal = null; File storagePath = getStoragePath(theBlobId, false); @@ -145,7 +145,8 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { } @Override - public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException { + public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) + throws IOException { InputStream inputStream = getInputStream(theResourceId, theBlobId); if (inputStream != null) { @@ -172,7 +173,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { } @Override - public void expungeBlob(IIdType theResourceId, String theBlobId) { + public void expungeBinaryContent(IIdType theResourceId, String theBlobId) { 
File storagePath = getStoragePath(theBlobId, false); if (storagePath != null) { File storageFile = getStorageFilename(storagePath, theResourceId, theBlobId); @@ -187,8 +188,8 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { } @Override - public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException { - StoredDetails details = fetchBlobDetails(theResourceId, theBlobId); + public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) throws IOException { + StoredDetails details = fetchBinaryContentDetails(theResourceId, theBlobId); try (InputStream inputStream = getInputStream(theResourceId, theBlobId)) { if (inputStream != null) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java index 7a470fda0e0..5d68ebe2ce8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java @@ -54,7 +54,7 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme @Nonnull @Override - public StoredDetails storeBlob( + public StoredDetails storeBinaryContent( IIdType theResourceId, String theBlobIdOrNull, String theContentType, @@ -66,7 +66,7 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme CountingInputStream countingIs = createCountingInputStream(hashingIs); byte[] bytes = IOUtils.toByteArray(countingIs); - String id = super.provideIdForNewBlob(theBlobIdOrNull, bytes, theRequestDetails, theContentType); + String id = super.provideIdForNewBinaryContent(theBlobIdOrNull, bytes, theRequestDetails, theContentType); String key = toKey(theResourceId, id); theInputStream.close(); myDataMap.put(key, bytes); @@ -77,13 +77,14 @@ public class MemoryBinaryStorageSvcImpl extends 
BaseBinaryStorageSvcImpl impleme } @Override - public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) { + public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) { String key = toKey(theResourceId, theBlobId); return myDetailsMap.get(key); } @Override - public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException { + public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) + throws IOException { String key = toKey(theResourceId, theBlobId); byte[] bytes = myDataMap.get(key); if (bytes == null) { @@ -94,14 +95,14 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme } @Override - public void expungeBlob(IIdType theResourceId, String theBlobId) { + public void expungeBinaryContent(IIdType theResourceId, String theBlobId) { String key = toKey(theResourceId, theBlobId); myDataMap.remove(key); myDetailsMap.remove(key); } @Override - public byte[] fetchBlob(IIdType theResourceId, String theBlobId) { + public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) { String key = toKey(theResourceId, theBlobId); return myDataMap.get(key); } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java index 9c8dda60674..7958bd4d976 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR Test Utilities + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.test.util; import ch.qos.logback.classic.spi.ILoggingEvent; From 79c7820d02247c76a9a4cc15ddac505a7b038f0b Mon Sep 17 00:00:00 2001 From: pano-smals <135063507+pano-smals@users.noreply.github.com> Date: Fri, 19 Apr 2024 11:32:46 +0200 Subject: [PATCH 17/26] bugfix: creating Hibernate dialect with specific version (#5725) * bugfix: creating Hibernate dialect with specific version fixes https://github.com/hapifhir/hapi-fhir/issues/5723 * Apply fix to other dialects, and add changelog * Spotless --------- Co-authored-by: James Agnew --- .../changelog/7_2_0/5725-fix-dialect-constructors.yaml | 8 ++++++++ .../jpa/model/dialect/HapiFhirCockroachDialect.java | 8 ++++++-- .../fhir/jpa/model/dialect/HapiFhirDerbyDialect.java | 10 +++++----- .../uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java | 10 +++++----- .../fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java | 10 +++++----- .../fhir/jpa/model/dialect/HapiFhirMySQLDialect.java | 10 +++++----- .../fhir/jpa/model/dialect/HapiFhirOracleDialect.java | 10 +++++----- .../jpa/model/dialect/HapiFhirPostgresDialect.java | 10 +++++----- .../jpa/model/dialect/HapiFhirSQLServerDialect.java | 10 +++++----- 9 files changed, 49 insertions(+), 37 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5725-fix-dialect-constructors.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5725-fix-dialect-constructors.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5725-fix-dialect-constructors.yaml 
new file mode 100644 index 00000000000..4e49bf2c5bd --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5725-fix-dialect-constructors.yaml @@ -0,0 +1,8 @@ +--- +type: fix +issue: 5725 +jira: SMILE-8343 +title: "The recommended constructor was not present on hibernate dialects + provided by HAPI FHIR, leading to warnings during startup, and failures + in some cases. This has been corrected. Thanks to GitHub user + @pano-smals for the contribution!" diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java index cf333149158..14240e3c84a 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.model.dialect; import org.hibernate.dialect.CockroachDialect; -import org.hibernate.dialect.DatabaseVersion; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for CockroachDB database. 
@@ -29,7 +29,11 @@ import org.hibernate.dialect.DatabaseVersion; public class HapiFhirCockroachDialect extends CockroachDialect { public HapiFhirCockroachDialect() { - super(DatabaseVersion.make(21, 1)); + super(); + } + + public HapiFhirCockroachDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java index 147f2fc7a2d..a9c0697b683 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.DerbyDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for Derby database. @@ -28,12 +28,12 @@ import org.hibernate.dialect.DerbyDialect; */ public class HapiFhirDerbyDialect extends DerbyDialect { - public HapiFhirDerbyDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirDerbyDialect() { + super(); } - public HapiFhirDerbyDialect() { - super(DatabaseVersion.make(10, 14, 2)); + public HapiFhirDerbyDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java index e42786a21dd..6aec92f2e39 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.H2Dialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * 
Dialect for H2 database. @@ -28,12 +28,12 @@ import org.hibernate.dialect.H2Dialect; */ public class HapiFhirH2Dialect extends H2Dialect { - public HapiFhirH2Dialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirH2Dialect() { + super(); } - public HapiFhirH2Dialect() { - super(DatabaseVersion.make(2, 2, 220)); + public HapiFhirH2Dialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java index 0d9f32b8bf9..0ccefd915ff 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.MariaDBDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for MySQL database. 
@@ -28,12 +28,12 @@ import org.hibernate.dialect.MariaDBDialect; */ public class HapiFhirMariaDBDialect extends MariaDBDialect { - public HapiFhirMariaDBDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirMariaDBDialect() { + super(); } - public HapiFhirMariaDBDialect() { - super(DatabaseVersion.make(10, 11, 5)); + public HapiFhirMariaDBDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java index f02c1aa6829..206eb4f81cf 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.MySQLDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for MySQL database. 
@@ -28,12 +28,12 @@ import org.hibernate.dialect.MySQLDialect; */ public class HapiFhirMySQLDialect extends MySQLDialect { - public HapiFhirMySQLDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirMySQLDialect() { + super(); } - public HapiFhirMySQLDialect() { - super(DatabaseVersion.make(5, 7)); + public HapiFhirMySQLDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java index 1c7b0d66d08..d05be69d0bf 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.OracleDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for Oracle database. 
@@ -28,12 +28,12 @@ import org.hibernate.dialect.OracleDialect; */ public class HapiFhirOracleDialect extends OracleDialect { - public HapiFhirOracleDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirOracleDialect() { + super(); } - public HapiFhirOracleDialect() { - super(DatabaseVersion.make(12, 2)); + public HapiFhirOracleDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java index 80127e1b58c..831161cbde7 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java @@ -19,17 +19,17 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import org.hibernate.dialect.PostgreSQLDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; public class HapiFhirPostgresDialect extends PostgreSQLDialect { - public HapiFhirPostgresDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirPostgresDialect() { + super(); } - public HapiFhirPostgresDialect() { - super(DatabaseVersion.make(10, 0, 0)); + public HapiFhirPostgresDialect(DialectResolutionInfo info) { + super(info); } /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java index b25076e319a..f75178efa95 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.model.dialect; -import org.hibernate.dialect.DatabaseVersion; import 
org.hibernate.dialect.SQLServerDialect; +import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; /** * Dialect for MS SQL Server database. @@ -28,12 +28,12 @@ import org.hibernate.dialect.SQLServerDialect; */ public class HapiFhirSQLServerDialect extends SQLServerDialect { - public HapiFhirSQLServerDialect(DatabaseVersion theDatabaseVersion) { - super(theDatabaseVersion); + public HapiFhirSQLServerDialect() { + super(); } - public HapiFhirSQLServerDialect() { - super(DatabaseVersion.make(11)); + public HapiFhirSQLServerDialect(DialectResolutionInfo info) { + super(info); } /** From 54d4b0fefe706664e85c498b2ef4bac307c1cc58 Mon Sep 17 00:00:00 2001 From: longma1 <32119004+longma1@users.noreply.github.com> Date: Fri, 19 Apr 2024 13:02:24 -0600 Subject: [PATCH 18/26] undo unintended backport fix undo (#5862) Co-authored-by: Long Ma --- .../ca/uhn/fhir/jpa/auth/RuleImplOpTest.java | 135 ------------------ .../server/interceptor/auth/RuleImplOp.java | 4 +- 2 files changed, 1 insertion(+), 138 deletions(-) delete mode 100644 hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java deleted file mode 100644 index 1f343735b83..00000000000 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/auth/RuleImplOpTest.java +++ /dev/null @@ -1,135 +0,0 @@ -package ca.uhn.fhir.jpa.auth; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor; -import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule; -import 
ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier; -import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum; -import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder; -import ca.uhn.fhir.util.BundleBuilder; -import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.hl7.fhir.r4.model.CodeType; -import org.hl7.fhir.r4.model.IdType; -import org.hl7.fhir.r4.model.Parameters; -import org.hl7.fhir.r4.model.Patient; -import org.hl7.fhir.r4.model.StringType; -import org.junit.jupiter.api.Test; - -import java.util.HashSet; -import java.util.List; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -public class RuleImplOpTest { - private static final String OPERATION = "operation"; - private static final String TYPE = "type"; - private static final String PATH = "path"; - private static final String VALUE = "value"; - private static final String REPLACE = "replace"; - private static final String PATIENT_BIRTH_DATE = "Patient.birthDate"; - private static final Parameters PARAMETERS = buildParameters(); - private static final String DOCUMENT = "document"; - private static final String ERROR_TEMPLATE = "HAPI-0339: Can not handle nested Parameters with %s operation"; - private static final String ERROR_UPDATE = String.format(ERROR_TEMPLATE, "UPDATE"); - private static final String ERROR_CREATE = String.format(ERROR_TEMPLATE, "CREATE"); - - private static final String REQUEST_RULELIST = AuthorizationInterceptor.class.getName() + "_1_RULELIST"; - private final Patient myPatient = buildPatient(); - - private final List myRules = new RuleBuilder() - .allow() - .transaction() - .withAnyOperation() - .andApplyNormalRules() - .andThen() - .allow() - .write() - .allResources() - .withAnyId() - .build(); - - private final IAuthRule myRule = myRules.get(0); - private final FhirContext myFhirContext = 
FhirContext.forR4Cached(); - private final IBaseBundle myInnerBundle = buildInnerBundler(myFhirContext); - - private final RequestDetails mySystemRequestDetails = buildSystemRequestDetails(myFhirContext, myRules); - private final IRuleApplier myRuleApplier = new AuthorizationInterceptor(); - - @Test - void testTransactionBundleUpdateWithParameters() { - final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); - bundleBuilder.addTransactionUpdateEntry(PARAMETERS); - - try { - applyRule(bundleBuilder.getBundle()); - fail("Expected an InvalidRequestException"); - } catch (InvalidRequestException exception) { - assertEquals(ERROR_UPDATE, exception.getMessage()); - } - } - - @Test - void testTransactionBundleWithNestedBundle() { - final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); - bundleBuilder.addTransactionCreateEntry(myInnerBundle); - - try { - applyRule(bundleBuilder.getBundle()); - fail("Expected an InvalidRequestException"); - } catch (InvalidRequestException exception) { - assertEquals(ERROR_CREATE, exception.getMessage()); - } - } - - @Test - void testTransactionBundlePatchWithParameters() { - final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); - bundleBuilder.addTransactionFhirPatchEntry(myPatient.getIdElement(), PARAMETERS); - - final AuthorizationInterceptor.Verdict verdict = applyRule(bundleBuilder.getBundle()); - - assertThat(verdict.getDecision(), equalTo(PolicyEnum.ALLOW)); - } - - private AuthorizationInterceptor.Verdict applyRule(IBaseBundle theBundle) { - return myRule.applyRule(RestOperationTypeEnum.TRANSACTION, mySystemRequestDetails, theBundle, myPatient.getIdElement(), myPatient, myRuleApplier, new HashSet<>(), null); - } - - private static Parameters buildParameters() { - final Parameters patch = new Parameters(); - - final Parameters.ParametersParameterComponent op = patch.addParameter().setName(OPERATION); - op.addPart().setName(TYPE).setValue(new CodeType(REPLACE)); - 
op.addPart().setName(PATH).setValue(new CodeType(PATIENT_BIRTH_DATE)); - op.addPart().setName(VALUE).setValue(new StringType("1912-04-14")); - - return patch; - } - - private static RequestDetails buildSystemRequestDetails(FhirContext theFhirContext, List theRules) { - final SystemRequestDetails systemRequestDetails = new SystemRequestDetails(); - systemRequestDetails.setFhirContext(theFhirContext); - systemRequestDetails.getUserData().put(REQUEST_RULELIST, theRules); - - return systemRequestDetails; - } - - private static Patient buildPatient() { - final Patient patient = new Patient(); - patient.setId(new IdType("Patient", "1")); - return patient; - } - - private static IBaseBundle buildInnerBundler(FhirContext theFhirContext) { - final BundleBuilder innerBundleBuilder = new BundleBuilder(theFhirContext); - innerBundleBuilder.setType(DOCUMENT); - return innerBundleBuilder.getBundle(); - } -} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java index 889341089fc..f879e73cee1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java @@ -859,7 +859,6 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { */ private boolean isInvalidNestedParametersRequest( FhirContext theContext, BundleEntryParts theEntry, RestOperationTypeEnum theOperation) { - IBaseResource resource = theEntry.getResource(); if (resource == null) { return false; @@ -867,10 +866,9 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { RuntimeResourceDefinition resourceDefinition = theContext.getResourceDefinition(resource); final boolean isResourceParameters = PARAMETERS.equals(resourceDefinition.getName()); - final boolean isResourceBundle = BUNDLE.equals(resourceDefinition.getName()); final boolean 
isOperationPatch = theOperation == RestOperationTypeEnum.PATCH; - return (isResourceParameters && !isOperationPatch) || isResourceBundle; + return (isResourceParameters && !isOperationPatch); } private void setTargetFromResourceId(RequestDetails theRequestDetails, FhirContext ctx, RuleTarget target) { From 4125d8c9adbfc125aa8a36696489e595a3bce4d4 Mon Sep 17 00:00:00 2001 From: Emre Dincturk <74370953+mrdnctrk@users.noreply.github.com> Date: Fri, 19 Apr 2024 17:43:19 -0400 Subject: [PATCH 19/26] added otel span for interceptors (#5853) --- hapi-fhir-base/pom.xml | 5 +++++ .../executor/BaseInterceptorService.java | 17 ++++++++++++++++- ...-opentelemetry-spans-for-interceptors.yaml | 6 ++++++ pom.xml | 19 +++++++++++++++++++ 4 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5855-opentelemetry-spans-for-interceptors.yaml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 1db4476b4d3..c5ec6eee1f1 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -138,6 +138,11 @@ system-stubs-jupiter test
+ + + io.opentelemetry + opentelemetry-api + diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java index d0ba36e44c3..22a196850ea 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java @@ -31,6 +31,8 @@ import ca.uhn.fhir.util.ReflectionUtil; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; import org.apache.commons.lang3.Validate; @@ -547,7 +549,7 @@ public abstract class BaseInterceptorService & I // Invoke the method try { - return myMethod.invoke(getInterceptor(), args); + return invokeMethod(args); } catch (InvocationTargetException e) { Throwable targetException = e.getTargetException(); if (myPointcut.isShouldLogAndSwallowException(targetException)) { @@ -566,6 +568,19 @@ public abstract class BaseInterceptorService & I throw new InternalErrorException(Msg.code(1911) + e); } } + + @WithSpan("hapifhir.interceptor") + private Object invokeMethod(Object[] args) throws InvocationTargetException, IllegalAccessException { + // Add attributes to the opentelemetry span + Span currentSpan = Span.current(); + currentSpan.setAttribute("hapifhir.interceptor.pointcut_name", myPointcut.name()); + currentSpan.setAttribute( + "hapifhir.interceptor.class_name", + myMethod.getDeclaringClass().getName()); + currentSpan.setAttribute("hapifhir.interceptor.method_name", myMethod.getName()); + + return myMethod.invoke(getInterceptor(), args); + } } protected class HookDescriptor { diff --git 
a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5855-opentelemetry-spans-for-interceptors.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5855-opentelemetry-spans-for-interceptors.yaml new file mode 100644 index 00000000000..52e71439b05 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5855-opentelemetry-spans-for-interceptors.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 5855 +title: "If using an OpenTelemetry agent, a span is now generated for each interceptor method call. The span is named +as 'hapifhir.interceptor' and it has the following attributes about the interceptor: +'hapifhir.interceptor.pointcut_name','hapifhir.interceptor.class_name', 'hapifhir.interceptor.method_name'" diff --git a/pom.xml b/pom.xml index c6ef616a573..0ffabdf9385 100644 --- a/pom.xml +++ b/pom.xml @@ -165,6 +165,11 @@ mockito-junit-jupiter test
+ + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + @@ -977,6 +982,7 @@ 3.3.0 1.8 4.10.0 + 1.32.0 4.1.2 1.4 6.2.5.Final @@ -2264,6 +2270,19 @@ provided true + + + io.opentelemetry + opentelemetry-bom + ${otel.version} + pom + import + + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + ${otel.version} + From d05009e526c839bbaf69ef6e73ffc6c1744a1e3e Mon Sep 17 00:00:00 2001 From: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com> Date: Fri, 19 Apr 2024 18:33:10 -0600 Subject: [PATCH 20/26] Delete-expunge does not honor delete resourcesOfType permissions (#5857) * Delete-expunge does not honor delete resourcesOfType permissions - fix --- ...r-delete-resources-of-type-permission.yaml | 5 + .../r4/AuthorizationInterceptorJpaR4Test.java | 160 +++++++++++++++--- .../server/interceptor/auth/RuleImplOp.java | 32 +++- 3 files changed, 165 insertions(+), 32 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5856-delete-expunge-does-not-honor-delete-resources-of-type-permission.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5856-delete-expunge-does-not-honor-delete-resources-of-type-permission.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5856-delete-expunge-does-not-honor-delete-resources-of-type-permission.yaml new file mode 100644 index 00000000000..e54ef45b478 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5856-delete-expunge-does-not-honor-delete-resources-of-type-permission.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5856 +title: "Previously, it was possible to execute the `$delete-expunge` operation on a resource even if the user did not +have delete permissions for the given resource type. This has been fixed." 
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java index fa54f31d854..ac93db3d643 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java @@ -1425,12 +1425,8 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes @Test - public void testDeleteExpungeBlocked() { - // Create Patient, and Observation that refers to it - Patient patient = new Patient(); - patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100"); - patient.addName().setFamily("Tester").addGiven("Siobhan"); - myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless(); + public void testDeleteExpunge_allResourcesPermission_forbidden() { + createPatient(); // Allow any deletes, but don't allow expunge myServer.getRestfulServer().registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { @@ -1442,25 +1438,12 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes } }); - try { - myClient - .delete() - .resourceConditionalByUrl("Patient?name=Siobhan&_expunge=true") - .execute(); - fail(); - } catch (ForbiddenOperationException e) { - // good - } + validateDeleteConditionalByUrlIsForbidden("Patient?name=Siobhan&_expunge=true"); } @Test - public void testDeleteExpungeAllowed() { - - // Create Patient, and Observation that refers to it - Patient patient = new Patient(); - patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100"); - patient.addName().setFamily("Tester").addGiven("Raghad"); - myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless(); + public void testDeleteExpunge_allResourcesPermission_allowed() { + 
createPatient(); // Allow deletes and allow expunge myServer.getRestfulServer().registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { @@ -1473,12 +1456,139 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes } }); - myClient - .delete() - .resourceConditionalByUrl("Patient?name=Siobhan&_expunge=true") + executeDeleteConditionalByUrl("Patient?name=Siobhan&_expunge=true"); + } + + private void createDeleteByTypeRule(String theType) { + myServer.getRestfulServer().registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { + return new RuleBuilder().allow() + .delete() + .onExpunge() + .resourcesOfType(theType) + .withAnyId() + .andThen().build(); + } + }); + } + + @Test + public void testDeleteExpunge_typePermission_allowed() { + IIdType id = createPatient(); + createDeleteByTypeRule("Patient"); + executeDeleteConditionalByUrl("Patient?_expunge=true&_id=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_typePermission_forbidden() { + IIdType id = createPatient(); + createDeleteByTypeRule("Observation"); + validateDeleteConditionalByUrlIsForbidden("Patient?_expunge=true&_id=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_noIdTypePermission_forbidden() { + createDeleteByTypeRule("Observation"); + validateDeleteConditionalByUrlIsForbidden("Patient?_expunge=true"); + } + + private void createPatientCompartmentRule(IIdType theId) { + myServer.getRestfulServer().registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { + return new RuleBuilder().allow() + .delete() + .onExpunge() + .allResources() + .inCompartment("Patient", theId) + .andThen().build(); + } + }); + } + + @Test + public void testDeleteExpunge_compartmentPermission_allowed() { + IIdType id = createPatient(); + createPatientCompartmentRule(id); + 
executeDeleteConditionalByUrl("Patient?_expunge=true&_id=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_compartmentPermission_forbidden() { + IIdType id = createPatient(); + IdDt compartmentId = new IdDt(); + compartmentId.setParts(null, "Patient", "123", null); + createPatientCompartmentRule(compartmentId); + validateDeleteConditionalByUrlIsForbidden("Patient?_expunge=true&_id=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_urlWithSearchParameterCompartmentPermission_forbidden() { + IIdType id = createPatient(); + IdDt compartmentId = new IdDt(); + compartmentId.setParts(null, "Patient", "123", null); + createPatientCompartmentRule(compartmentId); + validateDeleteConditionalByUrlIsForbidden("Observation?_expunge=true&patient=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_multipleIdsCompartmentPermission_forbidden() { + IIdType id = createPatient(); + createPatientCompartmentRule(id); + validateDeleteConditionalByUrlIsForbidden("Patient?_expunge=true&_id=" + id.getIdPart() + "_id=123"); + } + + private void createTypeInPatientCompartmentRule(IIdType theId) { + myServer.getRestfulServer().registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { + return new RuleBuilder().allow() + .delete() + .onExpunge() + .resourcesOfType("Patient") + .inCompartment("Patient", theId) + .andThen().build(); + } + }); + } + + @Test + public void testDeleteExpunge_typeInCompartmentPermission_allowed() { + IIdType id = createPatient(); + createTypeInPatientCompartmentRule(id); + executeDeleteConditionalByUrl("Patient?_expunge=true&_id=" + id.getIdPart()); + } + + @Test + public void testDeleteExpunge_typeInCompartmentPermission_forbidden() { + IIdType id = createPatient(); + createTypeInPatientCompartmentRule(id); + validateDeleteConditionalByUrlIsForbidden("Observation?_expunge=true&_id=" + id.getIdPart()); + } + + private void 
validateDeleteConditionalByUrlIsForbidden(String theUrl) { + try { + executeDeleteConditionalByUrl(theUrl); + fail(); + } catch (ForbiddenOperationException e) { + // good + } + } + + private void executeDeleteConditionalByUrl(String theUrl) { + myClient.delete() + .resourceConditionalByUrl(theUrl) .execute(); } + private IIdType createPatient() { + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100"); + patient.addName().setFamily("Tester").addGiven("Raghad"); + return myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless(); + } + @Test public void testSmartFilterSearchAllowed() { createObservation(withId("allowed"), withObservationCode(TermTestUtil.URL_MY_CODE_SYSTEM, "A")); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java index f879e73cee1..b924cf92a70 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java @@ -48,6 +48,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -56,6 +57,8 @@ import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import static ca.uhn.fhir.rest.server.interceptor.auth.AppliesTypeEnum.ALL_RESOURCES; +import static ca.uhn.fhir.rest.server.interceptor.auth.AppliesTypeEnum.TYPES; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -267,13 +270,16 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { case DELETE: if (theOperation == RestOperationTypeEnum.DELETE) { 
if (thePointcut == Pointcut.STORAGE_PRE_DELETE_EXPUNGE && myAppliesToDeleteExpunge) { - return newVerdict( - theOperation, - theRequestDetails, - theInputResource, - theInputResourceId, - theOutputResource, - theRuleApplier); + target.resourceType = theRequestDetails.getResourceName(); + String[] resourceIds = theRequestDetails.getParameters().get("_id"); + if (resourceIds != null) { + target.resourceIds = + extractResourceIdsFromRequestParameters(theRequestDetails, resourceIds); + } else { + target.setSearchParams(theRequestDetails); + } + + break; } if (myAppliesToDeleteCascade != (thePointcut == Pointcut.STORAGE_CASCADE_DELETE)) { return null; @@ -431,6 +437,18 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { theRuleApplier); } + private List extractResourceIdsFromRequestParameters( + RequestDetails theRequestDetails, String[] theResourceIds) { + return Arrays.stream(theResourceIds) + .map(id -> { + IIdType inputResourceId = + theRequestDetails.getFhirContext().getVersion().newIdType(); + inputResourceId.setParts(null, theRequestDetails.getResourceName(), id, null); + return inputResourceId; + }) + .collect(Collectors.toList()); + } + /** * Apply any special processing logic specific to this rule. * This is intended to be overridden. 
From 8a372231069c32ca2d9e4f786fa7b992b7eabc91 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Sun, 21 Apr 2024 13:20:55 -0700 Subject: [PATCH 21/26] Version bump --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- hapi-fhir-bom/pom.xml | 4 ++-- hapi-fhir-checkstyle/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 2 +- hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jpa/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- hapi-fhir-jpaserver-elastic-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-hfql/pom.xml | 2 +- hapi-fhir-jpaserver-ips/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +- hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4/pom.xml | 2 +- hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +- hapi-fhir-jpaserver-test-r5/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-cds-hooks/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml | 2 +- hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml | 4 ++-- hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml | 2 +- hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml | 2 +- hapi-fhir-serviceloaders/pom.xml | 2 +- .../hapi-fhir-spring-boot-autoconfigure/pom.xml | 2 +- .../hapi-fhir-spring-boot-sample-client-apache/pom.xml | 2 +- 
.../hapi-fhir-spring-boot-sample-client-okhttp/pom.xml | 2 +- .../hapi-fhir-spring-boot-sample-server-jersey/pom.xml | 2 +- hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml | 2 +- hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-sql-migrate/pom.xml | 2 +- hapi-fhir-storage-batch2-jobs/pom.xml | 2 +- hapi-fhir-storage-batch2-test-utilities/pom.xml | 2 +- hapi-fhir-storage-batch2/pom.xml | 2 +- hapi-fhir-storage-cr/pom.xml | 2 +- hapi-fhir-storage-mdm/pom.xml | 2 +- hapi-fhir-storage-test-utilities/pom.xml | 2 +- hapi-fhir-storage/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r4b/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- hapi-fhir-validation-resources-dstu2.1/pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r4b/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 2 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 2 +- tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml | 2 +- tests/hapi-fhir-base-test-mindeps-client/pom.xml | 2 +- tests/hapi-fhir-base-test-mindeps-server/pom.xml | 2 +- 77 files changed, 79 insertions(+), 79 deletions(-) diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 878f8e86233..83b6cb38520 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 
e60ddcaff71..87706cf277d 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index c5ec6eee1f1..ed0a9fc6da4 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index e816820a5b6..f05fea85c67 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index 5c2d84271c1..69c0460cccf 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 259e452b291..ee43373d473 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 0a92269dd80..ec3ae13b3f0 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index f4735a64673..f7053e7e873 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 42dca0baa55..35617f0402f 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 3b36b9a78cc..e974a566096 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 0fb7336aa4e..a14196db410 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index bc58e77989b..3dda194c646 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index f06c114a1e0..41acfef29b0 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index 062612c14f4..2079a6bb970 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 8abee5044e7..7077325c2c6 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index 90fe5c84f17..b7dd29597aa 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 6e69cafa0a1..6cf0698fa2e 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index c7d8789b66b..bb2281e78d6 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 21658470076..1bdf1f4d5eb 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index 60ecf7dda97..164b0a28831 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 9da334e1db8..1a7598268ab 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 75996628b21..2cee62cefaf 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 3e759bdd325..f0e7c3a6b94 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index c7372d53fe0..a79817e0242 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index e983f23ef78..a95f7d93414 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index f7109d2f830..c94281fbf8f 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index 91490d9aa59..a9b47640828 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 
7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index ca1a4a08fbd..8a2cd86b95d 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index ca814fc3407..0c7622cb1df 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 0592981d2ce..b756a6dbbd0 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index bb49ce2cff0..cd7ce2cb444 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 5de3625c9fa..10ad59331cd 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 33c0d53b3c7..a012bafd9b5 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 8ebb87af6b9..a152e68cf96 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 539c930b36f..e81d4d6c257 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index fd86c76c918..b545d5e6ca6 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 6f3ad9aebf0..a3eea68ca7a 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 083fd428a66..f009f0fb9e9 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml 
b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index d76aaae39a5..0c262056280 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index d8b0ef67672..fd2e15e3e75 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index a36a8106d02..7ac274198b7 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 5ab1e07f34c..07748abe01a 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 89e66d269d4..c3cdf23bf3c 100644 --- 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index 9ab88bfcc86..3290e867a12 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index a3c882ef6b6..bd198dfe7ae 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 65d782703f7..924eb57db31 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 7c12175c3ba..2b661d2f417 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git 
a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index dee88ccf8b7..508e17fe930 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index d5db154e318..71238102505 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index c06a923d4dd..8d57d1b7948 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 91f34741c1f..41c53b9e27e 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 49893d00f35..bcdfc3b4916 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index df18f8b9d40..23727b3009c 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml 
b/hapi-fhir-storage-test-utilities/pom.xml index d93b55c5cb6..f6a72dbc6ab 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 5aea02bd8f2..1ff49ea3ab7 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index e62996475ba..43a2ba0fab3 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 7297607fc8a..2138b6f052c 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 821380e7508..c94c9e07e17 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 88120888121..c380b9e13a3 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 
d0b3a4a6561..ba5db156616 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index 26d6ce29e11..452563edf9d 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 0c68498f67c..270596ab0cd 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index dedb670bef1..dc5ad4d0fc8 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 8f5f6350e30..162dc758563 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 5d10e9b7631..4458b64cc8b 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index f72ebdf1655..3d923001ddb 100644 --- 
a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 7ca2befe340..1dcfb04cbbf 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 73a32446518..f2e3aa6f5c0 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index 4c457ee1d84..1af75ea43e2 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 6bebb0710d6..989a90079bf 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 18a7b98a41e..347c73a1527 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml 
b/hapi-tinder-plugin/pom.xml index cf35dee8dcf..34cfa205251 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 9044c7b169e..aa0dda090ec 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 0ffabdf9385..deb48482f14 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index a1d051bf841..54410998f56 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 44c164994db..b0de40fff10 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 4744f63418c..14cdca64555 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.10-SNAPSHOT + 7.3.0-SNAPSHOT ../../pom.xml From 6653a1e1b2d2ef0470d6086190bc656879cfb456 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Sun, 21 Apr 2024 13:23:34 -0700 Subject: [PATCH 22/26] Versions added --- 
.../src/main/java/ca/uhn/fhir/util/VersionEnum.java | 5 ++++- .../resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md | 0 .../resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java index 2a097912ce7..75760bbbd30 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java @@ -153,7 +153,10 @@ public enum VersionEnum { V7_0_2, V7_1_0, - V7_2_0; + V7_2_0, + + V7_3_0, + V7_4_0; public static VersionEnum latestVersion() { VersionEnum[] values = VersionEnum.values(); diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml new file mode 100644 index 00000000000..f6531accc06 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml @@ -0,0 +1,3 @@ +--- +release-date: "2024-08-18" +codename: "TBD" From cd4560390c2dfae35dea0a65c98c53e72c91240d Mon Sep 17 00:00:00 2001 From: Tadgh Date: Sun, 21 Apr 2024 13:25:31 -0700 Subject: [PATCH 23/26] name the version --- .../resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml 
b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml index f6531accc06..6fe664dd8a2 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/version.yaml @@ -1,3 +1,3 @@ --- release-date: "2024-08-18" -codename: "TBD" +codename: "Copernicus" From 90201095c468aa4ff35ccbdb7b05652a3d4277c6 Mon Sep 17 00:00:00 2001 From: kateryna-mironova <107507153+kateryna-mironova@users.noreply.github.com> Date: Tue, 23 Apr 2024 10:08:32 -0400 Subject: [PATCH 24/26] fixed golden resource tag bug (#5868) --- ...olden-resource-tags-being-overwritten.yaml | 4 ++ .../jpa/dao/JpaStorageResourceParser.java | 3 +- .../jpa/dao/JpaStorageResourceParserTest.java | 56 +++++++++++++++++++ 3 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5867-golden-resource-tags-being-overwritten.yaml create mode 100644 hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5867-golden-resource-tags-being-overwritten.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5867-golden-resource-tags-being-overwritten.yaml new file mode 100644 index 00000000000..36e80dd8883 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5867-golden-resource-tags-being-overwritten.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5867 +title: "Previously, when adding multiple resources and defining a golden resource using MDM, the golden resource's tags were removed. 
This has been fixed" diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java index 5bcce1c0c0e..6bab499210c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java @@ -58,6 +58,7 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.IMetaTagSorter; import ca.uhn.fhir.util.MetaUtil; import jakarta.annotation.Nullable; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseCoding; @@ -470,7 +471,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { res.getMeta().setLastUpdated(theEntity.getUpdatedDate()); IDao.RESOURCE_PID.put(res, theEntity.getResourceId()); - if (theTagList != null) { + if (CollectionUtils.isNotEmpty(theTagList)) { res.getMeta().getTag().clear(); res.getMeta().getProfile().clear(); res.getMeta().getSecurity().clear(); diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java new file mode 100644 index 00000000000..c427b7bdd2f --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java @@ -0,0 +1,56 @@ +package ca.uhn.fhir.jpa.dao; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.jpa.entity.ResourceSearchView; +import ca.uhn.fhir.jpa.model.entity.BaseTag; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.Constants; +import 
org.hl7.fhir.r4.hapi.ctx.FhirR4; +import org.hl7.fhir.r4.model.Coding; +import org.hl7.fhir.r4.model.Patient; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.Collections; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; + +@ExtendWith(MockitoExtension.class) +public class JpaStorageResourceParserTest { + + @Mock + private FhirContext myFhirContext; + + @Mock + private ResourceSearchView patientSearchView; + @InjectMocks + private final JpaStorageResourceParser jpaStorageResourceParser = new JpaStorageResourceParser(); + + @Test + public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() { + Mockito.when(myFhirContext.getVersion()).thenReturn(new FhirR4()); + Mockito.when(patientSearchView.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1")); + + Coding coding = new Coding("system", "code", "display"); + List tagList = Collections.emptyList(); + boolean forHistoryOperation = false; + long version = 1L; + Patient resourceTarget = new Patient(); + resourceTarget.getMeta().addTag(coding); + + Patient actualResult = jpaStorageResourceParser + .populateResourceMetadata(patientSearchView, forHistoryOperation, tagList, version, resourceTarget); + + List actualTagList = actualResult.getMeta().getTag(); + assertFalse(actualTagList.isEmpty()); + assertEquals(actualTagList.size(), 1); + assertTrue(actualTagList.get(0).equals(coding)); + } +} \ No newline at end of file From 72ff192df8a8ce779d07aa61f2fdebeeb9fcc017 Mon Sep 17 00:00:00 2001 From: holly-smile <166412459+holly-smile@users.noreply.github.com> Date: Thu, 25 Apr 2024 13:39:10 -0700 Subject: [PATCH 25/26] Fix error message for HAPI-0302 showing misleading names for known resource types (#5875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * A solution, but perhaps not the best one * Make behavior more robust and extendable. Fixes #5874 * Apply spotless * Add changelog * 😅 * Add unit test * Get name from context instead of hard-coding remapping --- ...isleading-resource-type-error-message.yaml | 5 ++++ .../uhn/fhir/rest/server/RestfulServer.java | 3 +- .../fhir/rest/server/RestfulServerTest.java | 30 +++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5874-fix-misleading-resource-type-error-message.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5874-fix-misleading-resource-type-error-message.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5874-fix-misleading-resource-type-error-message.yaml new file mode 100644 index 00000000000..a6c084b71d4 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5874-fix-misleading-resource-type-error-message.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5874 +jira: SMILE-8149 +title: "Fixed a bug where 'List' would be incorrectly shown as 'ListResource' in the error response for a GET for an invalid resource." 
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java index d3386e5b9b1..b0d329fea2b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java @@ -1990,7 +1990,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer knownDistinctAndSortedResourceTypes = myResourceProviders.stream() - .map(t -> t.getResourceType().getSimpleName()) + .map(t -> + myFhirContext.getResourceDefinition(t.getResourceType()).getName()) .distinct() .sorted() .collect(toList()); diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/RestfulServerTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/RestfulServerTest.java index d7fdff76d8d..ea851529eba 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/RestfulServerTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/RestfulServerTest.java @@ -10,13 +10,17 @@ import ca.uhn.fhir.rest.annotation.Metadata; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.api.MethodOutcome; +import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.server.IFhirVersionServer; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseConformance; import org.hl7.fhir.instance.model.api.IBaseMetaType; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.ListResource; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; import 
org.junit.jupiter.api.Test; @@ -88,6 +92,20 @@ public class RestfulServerTest { } } + @Test + public void exceptionHandling() { + final SystemRequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setRequestType(RequestTypeEnum.GET); + requestDetails.setResourceName("InvalidResourceName"); + myRestfulServer.registerProvider(new MyListResourceProvider()); + ResourceNotFoundException thrown = assertThrows( + ResourceNotFoundException.class, + () -> myRestfulServer.determineResourceMethod(requestDetails, "1234"), + "Expected request to fail, but it succeeded."); + + assertTrue(thrown.getMessage().contains("List")); + assertFalse(thrown.getMessage().contains("ListResource")); + } //--------- Scaffolding ---------// private static class MyClassWithoutRestInterface implements Serializable { @@ -141,6 +159,18 @@ public class RestfulServerTest { } } + private static class MyListResourceProvider implements IResourceProvider { + @Create + public MethodOutcome create(@ResourceParam IBaseResource theResource) { + return mock(MethodOutcome.class); + } + + @Override + public Class getResourceType() { + return ListResource.class; + } + } + private static class MyProvider implements IResourceProvider { @Operation(name = "SHOW_ME_THE_MONEY", typeName = "MyResource") public IBaseBundle match() { From 660de6cc6089609598969b3b4d8c63a53a3dda05 Mon Sep 17 00:00:00 2001 From: Etienne Poirier <33007955+epeartree@users.noreply.github.com> Date: Fri, 26 Apr 2024 11:03:59 -0400 Subject: [PATCH 26/26] 5877 attempting to update a tokenparam with a value greater than 200 characters raises a sqlexception (#5878) * initial failing test. 
* solution * adding changelog * spotless --------- Co-authored-by: peartree --- ...updating-token-param-with-large-value.yaml | 5 +++ .../ResourceIndexedSearchParamToken.java | 2 + .../jpa/dao/r4/FhirResourceDaoR4Test.java | 40 ++++++++++++++++++- 3 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5877-exception-when-updating-token-param-with-large-value.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5877-exception-when-updating-token-param-with-large-value.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5877-exception-when-updating-token-param-with-large-value.yaml new file mode 100644 index 00000000000..ad6bbd02d1f --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5877-exception-when-updating-token-param-with-large-value.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5877 +title: "Previously, updating a tokenParam with a value greater than 200 characters would raise a SQLException. +This issue has been fixed." 
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java index 2a8ba6e8d03..9066f9f25db 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java @@ -36,6 +36,7 @@ import jakarta.persistence.Index; import jakarta.persistence.JoinColumn; import jakarta.persistence.ManyToOne; import jakarta.persistence.PrePersist; +import jakarta.persistence.PreUpdate; import jakarta.persistence.SequenceGenerator; import jakarta.persistence.Table; import org.apache.commons.lang3.StringUtils; @@ -429,6 +430,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa * We don't truncate earlier in the flow because the index hashes MUST be calculated on the full string. 
*/ @PrePersist + @PreUpdate public void truncateFieldsForDB() { mySystem = StringUtils.truncate(mySystem, MAX_LENGTH); myValue = StringUtils.truncate(myValue, MAX_LENGTH); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index 649359d3937..d0e33aaba8c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -54,7 +54,6 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ClasspathUtil; -import com.google.common.base.Charsets; import com.google.common.collect.Lists; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.RandomStringUtils; @@ -83,6 +82,7 @@ import org.hl7.fhir.r4.model.CompartmentDefinition; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.Condition; import org.hl7.fhir.r4.model.Consent; +import org.hl7.fhir.r4.model.ContactPoint; import org.hl7.fhir.r4.model.DateTimeType; import org.hl7.fhir.r4.model.DateType; import org.hl7.fhir.r4.model.Device; @@ -157,9 +157,11 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasLength; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.matchesPattern; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static 
org.junit.jupiter.api.Assertions.assertNotEquals; @@ -247,6 +249,42 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { return retVal; } + @Test + public void testUpdateResource_whenTokenPropertyAssignedTooLargeValue_willTruncateLargeValueOnUpdate(){ + // given + final String modifiedEmailPrefix = "modified"; + final String originalEmail = RandomStringUtils.randomAlphanumeric(ResourceIndexedSearchParamToken.MAX_LENGTH) + "@acme.corp"; + final String modifiedEmail = modifiedEmailPrefix + originalEmail; + + // when + Patient pt1 = new Patient(); + pt1.setActive(true); + pt1.addName().setFamily("FAM"); + pt1.addTelecom().setSystem(ContactPoint.ContactPointSystem.EMAIL).setValue(originalEmail); + + myPatientDao.create(pt1).getId().toUnqualifiedVersionless(); + + pt1.getTelecomFirstRep().setValue(modifiedEmail); + + IIdType id1 = myPatientDao.update(pt1).getId().toUnqualifiedVersionless(); + + // then + runInTransaction(() -> { + List paramValues = myResourceIndexedSearchParamTokenDao + .findAll() + .stream() + .filter(t -> defaultString(t.getSystem()).equals("email")) + .map(t -> t.getValue()) + .collect(Collectors.toList()); + + assertThat(paramValues, hasSize(2)); + + for (String tokenValue : paramValues) { + assertThat(tokenValue, startsWith(modifiedEmailPrefix)); + assertThat(tokenValue, hasLength(ResourceIndexedSearchParamToken.MAX_LENGTH)); + } + }); + } @Test public void testDeletedResourcesAreReindexed() {