From 6226ae02d5310b38e5224338f379a0eb6d300746 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 18 Mar 2021 23:16:45 -0400 Subject: [PATCH 01/61] Add basic skeleton of MDM expansion --- .../ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java | 14 ++- .../fhir/jpa/dao/index/IdHelperService.java | 23 +++++ .../fhir/jpa/dao/mdm/MdmLinkExpandSvc.java | 88 +++++++++++++++++++ .../jpa/search/helper/SearchParamHelper.java | 60 +++++++++++++ .../fhir/jpa/term/BaseTermReadSvcImpl.java | 1 + .../validation/JpaValidationSupportChain.java | 1 + .../ca/uhn/fhir/jpa/config/TestR4Config.java | 3 +- .../ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java | 1 - .../fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java | 54 ++++++++++++ .../jpa/mdm/interceptor/MdmExpungeTest.java | 4 + .../uhn/fhir/jpa/config/TestJpaR4Config.java | 1 + 11 files changed, 246 insertions(+), 4 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java index 7ad98e46e11..28ea6cac850 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java @@ -33,11 +33,11 @@ import java.util.List; @Repository public interface IMdmLinkDao extends JpaRepository { @Modifying - @Query("DELETE FROM MdmLink f WHERE myGoldenResourcePid = :pid OR mySourcePid = :pid") + @Query("DELETE FROM MdmLink f WHERE f.myGoldenResourcePid = :pid OR f.mySourcePid = :pid") int deleteWithAnyReferenceToPid(@Param("pid") Long thePid); @Modifying - @Query("DELETE FROM MdmLink f WHERE (myGoldenResourcePid = :pid OR mySourcePid = :pid) AND myMatchResult <> :matchResult") + @Query("DELETE FROM MdmLink f WHERE 
(f.myGoldenResourcePid = :pid OR f.mySourcePid = :pid) AND f.myMatchResult <> :matchResult") int deleteWithAnyReferenceToPidAndMatchResultNot(@Param("pid") Long thePid, @Param("matchResult") MdmMatchResultEnum theMatchResult); @Query("SELECT ml2.myGoldenResourcePid, ml2.mySourcePid FROM MdmLink ml2 " + @@ -51,4 +51,14 @@ public interface IMdmLinkDao extends JpaRepository { "AND hrl.myTargetResourceType='Patient'" + ")") List> expandPidsFromGroupPidGivenMatchResult(@Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + + @Query("SELECT ml.myGoldenResourcePid, ml.mySourcePid " + + "FROM MdmLink ml " + + "INNER JOIN MdmLink ml2 " + + "on ml.myGoldenResourcePid=ml2.myGoldenResourcePid " + + "WHERE ml2.mySourcePid=:sourcePid " + + "AND ml2.myMatchResult=:matchResult " + + "AND ml.myMatchResult=:matchResult") + List> expandPidsBySourcePidAndMatchResult(@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java index a8795a47d4f..d28ba15b5d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java @@ -380,6 +380,29 @@ public class IdHelperService { } } + /** + * + * Given a set of PIDs, return a set of public FHIR Resource IDs. + * This function will resolve a forced ID if it resolves, and if it fails to resolve to a forced it, will just return the pid + * Example: + * Let's say we have Patient/1(pid == 1), Patient/pat1 (pid == 2), Patient/3 (pid == 3), their pids would resolve as follows: + * + * [1,2,3] -> ["1","pat1","3"] + * + * @param thePids The Set of pids you would like to resolve to external FHIR Resource IDs. 
+ * @return A Set of strings representing the FHIR IDs of the pids. + */ + public Set translatePidsToFhirResourceIds(Set thePids) { + Map> pidToForcedIdMap = translatePidsToForcedIds(thePids); + + //If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID. + Set resolvedResourceIds = pidToForcedIdMap.entrySet().stream() + .map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString()) + .collect(Collectors.toSet()); + + return resolvedResourceIds; + + } public Map> translatePidsToForcedIds(Set thePids) { Map> retVal = new HashMap<>(myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.FORCED_ID, thePids)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java new file mode 100644 index 00000000000..69d978725ae --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java @@ -0,0 +1,88 @@ +package ca.uhn.fhir.jpa.dao.mdm; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; +import ca.uhn.fhir.jpa.dao.index.IdHelperService; +import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +@Service +public class MdmLinkExpandSvc { + private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkExpandSvc.class); + + @Autowired + private IMdmLinkDao myMdmLinkDao; + @Autowired + private IdHelperService myIdHelperService; + + /** + * Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are + * MDM-Matched to this resource. + * + * @param theResource The resource to MDM-Expand + * @return A set of strings representing the FHIR IDs of the expanded resources. + */ + public Set expandMdmBySourceResource(IBaseResource theResource) { + return expandMdmBySourceResourceId(theResource.getIdElement()); + } + + /** + * Given a resource ID of a source resource, perform MDM expansion and return all the resource IDs of all resources that are + * MDM-Matched to this resource. + * + * @param theId The Resource ID of the resource to MDM-Expand + * @return A set of strings representing the FHIR ids of the expanded resources. 
+ */ + public Set expandMdmBySourceResourceId(IIdType theId) { + Long pidOrThrowException = myIdHelperService.getPidOrThrowException(theId); + return expandMdmBySourceResourcePid(pidOrThrowException); + } + + /** + * Given a PID of a source resource, perform MDM expansion and return all the resource IDs of all resources that are + * MDM-Matched to this resource. + * + * @param theSourceResourcePid The PID of the resource to MDM-Expand + * @return A set of strings representing the FHIR ids of the expanded resources. + */ + public Set expandMdmBySourceResourcePid(Long theSourceResourcePid) { + List> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); + Set flattenedPids = new HashSet<>(); + goldenPidSourcePidTuples.forEach(flattenedPids::addAll); + + Set resourceIds = myIdHelperService.translatePidsToFhirResourceIds(flattenedPids); + return resourceIds; + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java new file mode 100644 index 00000000000..8b18d6faba8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java @@ -0,0 +1,60 @@ +package ca.uhn.fhir.jpa.search.helper; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.context.RuntimeSearchParam; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Service +public class SearchParamHelper { + + @Autowired + private FhirContext myFhirContext; + + + public Collection getPatientSearchParamsForResourceType(String theResourceType) { + RuntimeResourceDefinition runtimeResourceDefinition 
= myFhirContext.getResourceDefinition(theResourceType); + Map searchParams = new HashMap<>(); + + RuntimeSearchParam patientSearchParam = runtimeResourceDefinition.getSearchParam("patient"); + if (patientSearchParam != null) { + searchParams.put(patientSearchParam.getName(), patientSearchParam); + + } + RuntimeSearchParam subjectSearchParam = runtimeResourceDefinition.getSearchParam("subject"); + if (subjectSearchParam != null) { + searchParams.put(subjectSearchParam.getName(), subjectSearchParam); + } + + List compartmentSearchParams = getPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition); + compartmentSearchParams.forEach(param -> searchParams.put(param.getName(), param)); + + return searchParams.values(); + } + + /** + * Search the resource definition for a compartment named 'patient' and return its related Search Parameter. + */ + public List getPatientCompartmentRuntimeSearchParams(RuntimeResourceDefinition runtimeResourceDefinition) { + List patientSearchParam = new ArrayList<>(); + List searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); + return searchParams; +// if (searchParams == null || searchParams.size() == 0) { +// String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType); +// throw new IllegalArgumentException(errorMessage); +// } else if (searchParams.size() == 1) { +// patientSearchParam = searchParams.get(0); +// } else { +// String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as we are unable to disambiguate which patient search parameter we should be searching by.", myResourceType); +// throw new IllegalArgumentException(errorMessage); +// } +// return patientSearchParam; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java index 30526d363e0..687ec721349 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java @@ -2635,6 +2635,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { @CoverageIgnore @Override public IValidationSupport.CodeValidationResult validateCode(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + //TODO GGG TRY TO JUST AUTO_PASS HERE AND SEE WHAT HAPPENS. invokeRunnableForUnitTest(); if (isNotBlank(theValueSetUrl)) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java index e3d50ce141a..eddfbacf81a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java @@ -68,6 +68,7 @@ public class JpaValidationSupportChain extends ValidationSupportChain { public void postConstruct() { addValidationSupport(myDefaultProfileValidationSupport); addValidationSupport(myJpaValidationSupport); + //TODO MAKE SURE THAT THIS IS BEING CAL addValidationSupport(myTerminologyService); addValidationSupport(new SnapshotGeneratingValidationSupport(myFhirContext)); addValidationSupport(new InMemoryTerminologyServerValidationSupport(myFhirContext)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java index d62a87b6818..222ecfe943c 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java @@ -112,7 +112,8 @@ public class TestR4Config extends BaseJavaConfigR4 { }; retVal.setDriver(new org.h2.Driver()); - retVal.setUrl("jdbc:h2:mem:testdb_r4"); + retVal.setUrl("jdbc:h2:file:/home/tadgh/smile/hapi-fhir/testdb_r4.db"); +// retVal.setUrl("jdbc:h2:mem:testdb_r4"); retVal.setMaxWaitMillis(30000); retVal.setUsername(""); retVal.setPassword(""); diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java index 4245c859123..3c1b28764fa 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java @@ -182,7 +182,6 @@ abstract public class BaseMdmR4Test extends BaseJpaR4Test { Patient patient = (Patient) outcome.getResource(); patient.setId(outcome.getId()); return patient; - } @Nonnull diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java index 3d8c392e9a7..cd3978133b8 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java @@ -2,15 +2,26 @@ package ca.uhn.fhir.jpa.mdm.dao; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.IMdmSettings; +import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.rules.json.MdmRulesJson; import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.util.TestUtil; +import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import 
java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; + import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -48,4 +59,47 @@ public class MdmLinkDaoSvcTest extends BaseMdmR4Test { assertEquals(rules.getVersion(), newLink.getVersion()); } + @Test + public void testExpandPidsWorks() { + + Patient golden = createGoldenPatient(); + + //Create 10 linked patients. + List mdmLinks = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + mdmLinks.add(createPatientAndLinkTo(golden.getIdElement().getIdPartAsLong(), MdmMatchResultEnum.MATCH)); + } + + //Now lets connect a few as just POSSIBLE_MATCHes and ensure they aren't returned. 
+ for (int i = 0 ; i < 5; i++) { + createPatientAndLinkTo(golden.getIdElement().getIdPartAsLong(), MdmMatchResultEnum.POSSIBLE_MATCH); + } + + List expectedExpandedPids = mdmLinks.stream().map(MdmLink::getSourcePid).collect(Collectors.toList()); + + //SUT + List> lists = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(mdmLinks.get(0).getSourcePid(), MdmMatchResultEnum.MATCH); + + assertThat(lists, hasSize(10)); + + lists.stream() + .forEach(pair -> { + assertThat(pair.get(0), is(equalTo(golden.getIdElement().getIdPartAsLong()))); + assertThat(pair.get(1), is(in(expectedExpandedPids))); + }); + } + + private MdmLink createPatientAndLinkTo(Long thePatientPid, MdmMatchResultEnum theMdmMatchResultEnum) { + Patient patient = createPatient(); + + MdmLink mdmLink = myMdmLinkDaoSvc.newMdmLink(); + mdmLink.setLinkSource(MdmLinkSourceEnum.MANUAL); + mdmLink.setMatchResult(theMdmMatchResultEnum); + mdmLink.setCreated(new Date()); + mdmLink.setUpdated(new Date()); + mdmLink.setGoldenResourcePid(thePatientPid); + mdmLink.setSourcePid(myIdHelperService.getPidOrNull(patient)); + MdmLink saved= myMdmLinkDao.save(mdmLink); + return saved; + } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmExpungeTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmExpungeTest.java index a0ca44cc446..659222d3e2a 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmExpungeTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmExpungeTest.java @@ -16,6 +16,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import java.util.List; + import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.StringContains.containsString; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -49,6 +51,8 @@ public class MdmExpungeTest extends BaseMdmR4Test 
{ saveLink(mdmLink); } + + @Test public void testUninterceptedDeleteRemovesMdmReference() { assertEquals(1, myMdmLinkDao.count()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java index 1716e099cad..d5c08620370 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java @@ -68,6 +68,7 @@ public class TestJpaR4Config extends BaseJavaConfigR4 { retVal.setDriver(new org.h2.Driver()); retVal.setUrl("jdbc:h2:mem:testdb_r4"); +// retVal.setUrl("jdbc:h2:file:/home/tadgh/smile/hapi-fhir/testdb_r4.db"); retVal.setMaxWaitMillis(10000); retVal.setUsername(""); retVal.setPassword(""); From 659a8771003555d66c0e29c148eea3075b61bd8d Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 18 Mar 2021 23:17:56 -0400 Subject: [PATCH 02/61] Add interceptor --- ...SearchExpandingInterceptorInterceptor.java | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java new file mode 100644 index 00000000000..9c61a0ad185 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java @@ -0,0 +1,105 @@ +package ca.uhn.fhir.jpa.interceptor; + +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Interceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.dao.index.IdHelperService; +import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc; +import ca.uhn.fhir.jpa.search.helper.SearchParamHelper; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.model.api.IQueryParameterAnd; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.StringParam; +import ca.uhn.fhir.util.ClasspathUtil; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBaseConformance; +import org.slf4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.slf4j.LoggerFactory.getLogger; + +/** + * This interceptor replaces the auto-generated CapabilityStatement that is generated + * by the HAPI FHIR Server with a static hard-coded resource. 
+ */ +@Interceptor +public class MdmSearchExpandingInterceptorInterceptor { + private static final Logger ourLog = getLogger(MdmSearchExpandingInterceptorInterceptor.class); + + @Autowired + private MdmLinkExpandSvc myMdmLinkExpandSvc; + @Autowired + private SearchParamHelper mySearchParamHelper; + @Autowired + private FhirContext myFhirContext; + @Autowired + private IdHelperService myIdHelperService; + + + @Hook(Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH) + public boolean hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { + Map parameters =theRequestDetails.getParameters(); + boolean shouldExpandMdm = false; + if (parameters.containsKey("_mdm")) { + shouldExpandMdm = parameters.get("_mdm").length == 1 && parameters.get("_mdm")[0].equalsIgnoreCase("true"); + } + + + if (shouldExpandMdm) { + String resourceName = theRequestDetails.getResourceName(); + Collection patientSearchParams = mySearchParamHelper.getPatientSearchParamsForResourceType(resourceName); + for (RuntimeSearchParam patientSearchParam: patientSearchParams) { + if (!theSearchParameterMap.containsKey(patientSearchParam.getName())) { + continue; + } + List> lists = theSearchParameterMap.get(patientSearchParam.getName()); + for (List list : lists) { + List toAdd = new ArrayList<>(); + for (IQueryParameterType paramVal : list) { + if (!paramVal.getMissing() && paramVal.getQueryParameterQualifier().equalsIgnoreCase("equals")){ + String valueAsQueryToken = paramVal.getValueAsQueryToken(myFhirContext); + Long pidOrThrowException = myIdHelperService.getPidOrThrowException(new IdDt(valueAsQueryToken)); + Set expandedIds= myMdmLinkExpandSvc.expandMdmBySourceResourcePid(pidOrThrowException); + ourLog.info("Expanded to resource ids: [{}]", String.join(",", expandedIds)); + toAdd.addAll(expandedIds.stream().map(StringParam::new).collect(Collectors.toList())); + } + } + list.addAll(toAdd); + } + } + } + return true; + } +} From e6cfb77c79f253542b70136ba4921de7b01c2bbd Mon Sep 17 
00:00:00 2001 From: Tadgh Date: Tue, 23 Mar 2021 18:11:39 -0400 Subject: [PATCH 03/61] Wip --- .../src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java | 7 +++++++ .../java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java | 8 ++++++-- .../MdmSearchExpandingInterceptorInterceptor.java | 8 ++++---- .../test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 4 ++++ .../r4/FhirResourceDaoR4SearchCustomSearchParamTest.java | 3 +-- .../r4/ResourceProviderCustomSearchParamR4Test.java | 9 ++++++++- .../fhir/jpa/mdm/interceptor/MdmStorageInterceptor.java | 5 ++--- 7 files changed, 32 insertions(+), 12 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index 579ef42ee11..6ef6a72f468 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -59,6 +59,7 @@ import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.graphql.JpaStorageServices; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor; import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; @@ -457,6 +458,12 @@ public abstract class BaseConfig { return new RequestTenantPartitionInterceptor(); } + @Bean + @Lazy + public MdmSearchExpandingInterceptorInterceptor mdmSearchExpandingInterceptorInterceptor() { + return new MdmSearchExpandingInterceptorInterceptor(); + } + @Bean @Lazy public TerminologyUploaderProvider terminologyUploaderProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java index 69d978725ae..a04e4c2f795 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.dao.mdm; import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; +import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -39,7 +40,7 @@ import java.util.Set; @Service public class MdmLinkExpandSvc { - private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkExpandSvc.class); + private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); @Autowired private IMdmLinkDao myMdmLinkDao; @@ -54,6 +55,7 @@ public class MdmLinkExpandSvc { * @return A set of strings representing the FHIR IDs of the expanded resources. */ public Set expandMdmBySourceResource(IBaseResource theResource) { + ourLog.debug("About to MDM-expand source resource {}", theResource); return expandMdmBySourceResourceId(theResource.getIdElement()); } @@ -65,6 +67,7 @@ public class MdmLinkExpandSvc { * @return A set of strings representing the FHIR ids of the expanded resources. */ public Set expandMdmBySourceResourceId(IIdType theId) { + ourLog.debug("About to expand source resource with resource id {}", theId); Long pidOrThrowException = myIdHelperService.getPidOrThrowException(theId); return expandMdmBySourceResourcePid(pidOrThrowException); } @@ -77,11 +80,12 @@ public class MdmLinkExpandSvc { * @return A set of strings representing the FHIR ids of the expanded resources. 
*/ public Set expandMdmBySourceResourcePid(Long theSourceResourcePid) { + ourLog.debug("About to expand source resource with PID {}", theSourceResourcePid); List> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); Set flattenedPids = new HashSet<>(); goldenPidSourcePidTuples.forEach(flattenedPids::addAll); - Set resourceIds = myIdHelperService.translatePidsToFhirResourceIds(flattenedPids); + ourLog.debug("Pid {} has been expanded to [{}]", theSourceResourcePid, String.join(",", resourceIds)); return resourceIds; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java index 9c61a0ad185..64762d566c2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc; import ca.uhn.fhir.jpa.search.helper.SearchParamHelper; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; @@ -56,7 +57,7 @@ import static org.slf4j.LoggerFactory.getLogger; */ @Interceptor public class MdmSearchExpandingInterceptorInterceptor { - private static final Logger ourLog = getLogger(MdmSearchExpandingInterceptorInterceptor.class); + private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); @Autowired private MdmLinkExpandSvc myMdmLinkExpandSvc; @@ -67,7 +68,6 @@ public class MdmSearchExpandingInterceptorInterceptor { @Autowired private IdHelperService 
myIdHelperService; - @Hook(Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH) public boolean hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { Map parameters =theRequestDetails.getParameters(); @@ -75,11 +75,11 @@ public class MdmSearchExpandingInterceptorInterceptor { if (parameters.containsKey("_mdm")) { shouldExpandMdm = parameters.get("_mdm").length == 1 && parameters.get("_mdm")[0].equalsIgnoreCase("true"); } - - if (shouldExpandMdm) { + ourLog.debug("Detected that incoming request has _mdm=true. The request was: {}", theRequestDetails.getRequestPath()); String resourceName = theRequestDetails.getResourceName(); Collection patientSearchParams = mySearchParamHelper.getPatientSearchParamsForResourceType(resourceName); + ourLog.debug("Resource type {} has patient search parameters [{}]", resourceName, patientSearchParams.stream().map(RuntimeSearchParam::getName).collect(Collectors.joining(", "))); for (RuntimeSearchParam patientSearchParam: patientSearchParams) { if (!theSearchParameterMap.containsKey(patientSearchParam.getName())) { continue; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index 4d36ee8c648..8e0b9adb277 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -129,6 +129,7 @@ import org.hl7.fhir.r4.model.DocumentReference; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence; import org.hl7.fhir.r4.model.EpisodeOfCare; +import org.hl7.fhir.r4.model.ExplanationOfBenefit; import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.Immunization; import org.hl7.fhir.r4.model.ImmunizationRecommendation; @@ -378,6 +379,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil 
@Qualifier("myPatientDaoR4") protected IFhirResourceDaoPatient myPatientDao; @Autowired + @Qualifier("myExplanationOfBenefitDaoR4") + protected IFhirResourceDao myExplanationOfBenefitDao; + @Autowired protected IResourceTableDao myResourceTableDao; @Autowired protected IResourceHistoryTableDao myResourceHistoryTableDao; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java index 5e56111fa2d..7428da07671 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java @@ -38,6 +38,7 @@ import org.hl7.fhir.r4.model.DiagnosticReport; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender; +import org.hl7.fhir.r4.model.ExplanationOfBenefit; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.IntegerType; @@ -193,10 +194,8 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test search = myPatientDao.search(SearchParameterMap.newSynchronous("future-appointment-count", new NumberParam("lt0"))); assertEquals(0, search.size()); - } - /** * Draft search parameters should be ok even if they aren't completely valid */ diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java index 2da45eeda72..3b44e465d5e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java @@ -13,8 +13,11 @@ import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.gclient.DateClientParam; import ca.uhn.fhir.rest.gclient.ReferenceClientParam; +import ca.uhn.fhir.rest.gclient.StringClientParam; import ca.uhn.fhir.rest.gclient.TokenClientParam; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.BundleUtil; import org.apache.commons.io.IOUtils; @@ -31,6 +34,7 @@ import org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResource import org.hl7.fhir.r4.model.CodeType; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender; +import org.hl7.fhir.r4.model.ExplanationOfBenefit; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Observation.ObservationStatus; import org.hl7.fhir.r4.model.Patient; @@ -54,6 +58,8 @@ import java.util.stream.Collectors; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; @@ -328,9 +334,9 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide List foundResources = toUnqualifiedVersionlessIdValues(bundle); assertThat(foundResources, contains(p1id.getValue())); - } + @SuppressWarnings("unused") @Test public void testSearchQualifiedWithCustomReferenceParam() { @@ -416,6 
+422,7 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide } + /** * See #1300 */ diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptor.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptor.java index 06704d4ff66..14dc07b62db 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptor.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptor.java @@ -36,6 +36,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.Extension; +import org.hl7.fhir.r4.model.Patient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -57,13 +59,10 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { private EIDHelper myEIDHelper; @Autowired private IMdmSettings myMdmSettings; - @Autowired - private GoldenResourceHelper myGoldenResourceHelper; @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) public void blockManualResourceManipulationOnCreate(IBaseResource theBaseResource, RequestDetails theRequestDetails, ServletRequestDetails theServletRequestDetails) { - //If running in single EID mode, forbid multiple eids. 
if (myMdmSettings.isPreventMultipleEids()) { forbidIfHasMultipleEids(theBaseResource); From 9f13225aa5e6085f23c876fc388cb2d87a9225b1 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 26 Mar 2021 17:57:38 -0400 Subject: [PATCH 04/61] WIP interceptor using reference param modifier --- .../ca/uhn/fhir/interceptor/api/Pointcut.java | 6 ++- .../java/ca/uhn/fhir/rest/api/Constants.java | 1 + .../uhn/fhir/rest/param/ReferenceParam.java | 15 ++++++ .../ca/uhn/fhir/jpa/config/BaseConfig.java | 7 ++- ...SearchExpandingInterceptorInterceptor.java | 48 +++++-------------- .../jpa/search/SearchCoordinatorSvcImpl.java | 11 ++++- .../jpa/mdm/config/MdmConsumerConfig.java | 6 +++ .../MdmSubmitterInterceptorLoader.java | 4 ++ .../ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java | 3 ++ .../interceptor/MdmStorageInterceptorIT.java | 9 ++++ .../uhn/fhir/jpa/config/TestJpaR4Config.java | 1 - 11 files changed, 72 insertions(+), 39 deletions(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index d5e9aa9a35b..837fc188567 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -1155,6 +1155,9 @@ public enum Pointcut implements IPointcut { * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will * only be populated when operating in a RestfulServer implementation. It is provided as a convenience. * + *
  • + * ca.uhn.fhir.jpa.searchparam.SearchParameterMap - Contains the details of the search being checked. This can be modified. + *
  • * *

    * Hooks should return void. @@ -1163,7 +1166,8 @@ public enum Pointcut implements IPointcut { STORAGE_PRESEARCH_REGISTERED(void.class, "ca.uhn.fhir.rest.server.util.ICachedSearchDetails", "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.searchparam.SearchParameterMap" ), /** diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java index 93d957299d2..ef960acc0a2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java @@ -200,6 +200,7 @@ public class Constants { public static final String PARAMQUALIFIER_STRING_CONTAINS = ":contains"; public static final String PARAMQUALIFIER_STRING_EXACT = ":exact"; public static final String PARAMQUALIFIER_TOKEN_TEXT = ":text"; + public static final String PARAMQUALIFIER_MDM = ":mdm"; public static final int STATUS_HTTP_200_OK = 200; public static final int STATUS_HTTP_201_CREATED = 201; public static final int STATUS_HTTP_204_NO_CONTENT = 204; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java index a8c43a708bd..583edaa6731 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; @@ -41,6 +42,7 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ private String 
myBaseUrl; private String myValue; private String myIdPart; + private Boolean myMdmExpand; /** * Constructor @@ -121,6 +123,11 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ @Override void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) { + if (Constants.PARAMQUALIFIER_MDM.equals(theQualifier)) { + myMdmExpand = true; + theQualifier = ""; + //TODO GGG i probably have to deal with chaining here? like refusing the mdm qualifier if i can detect its chained? + } String q = theQualifier; if (isNotBlank(q)) { if (q.startsWith(":")) { @@ -166,6 +173,14 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ return myBaseUrl; } + public boolean isMdmExpand() { + return myMdmExpand != null && myMdmExpand; + } + + public ReferenceParam setMdmExpand(boolean theMdmExpand) { + myMdmExpand = theMdmExpand; + return this; + } public String getChain() { return myChain; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index 6ef6a72f468..55617d7b2a2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.dao.index.DaoResourceLinkResolver; import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor; +import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc; import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilder; import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderCoords; import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderDate; @@ -459,11 +460,15 @@ public abstract class BaseConfig { } @Bean - @Lazy public MdmSearchExpandingInterceptorInterceptor 
mdmSearchExpandingInterceptorInterceptor() { return new MdmSearchExpandingInterceptorInterceptor(); } + @Bean + public MdmLinkExpandSvc myMdmLinkExpandSvc() { + return new MdmLinkExpandSvc(); + } + @Bean @Lazy public TerminologyUploaderProvider terminologyUploaderProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java index 64762d566c2..04333b578a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java @@ -35,6 +35,7 @@ import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.util.ClasspathUtil; import org.apache.commons.lang3.Validate; @@ -62,44 +63,21 @@ public class MdmSearchExpandingInterceptorInterceptor { @Autowired private MdmLinkExpandSvc myMdmLinkExpandSvc; @Autowired - private SearchParamHelper mySearchParamHelper; - @Autowired private FhirContext myFhirContext; @Autowired private IdHelperService myIdHelperService; - @Hook(Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH) - public boolean hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { - Map parameters =theRequestDetails.getParameters(); - boolean shouldExpandMdm = false; - if (parameters.containsKey("_mdm")) { - shouldExpandMdm = parameters.get("_mdm").length == 1 && parameters.get("_mdm")[0].equalsIgnoreCase("true"); - } - if (shouldExpandMdm) { - ourLog.debug("Detected that incoming request has _mdm=true. 
The request was: {}", theRequestDetails.getRequestPath()); - String resourceName = theRequestDetails.getResourceName(); - Collection patientSearchParams = mySearchParamHelper.getPatientSearchParamsForResourceType(resourceName); - ourLog.debug("Resource type {} has patient search parameters [{}]", resourceName, patientSearchParams.stream().map(RuntimeSearchParam::getName).collect(Collectors.joining(", "))); - for (RuntimeSearchParam patientSearchParam: patientSearchParams) { - if (!theSearchParameterMap.containsKey(patientSearchParam.getName())) { - continue; - } - List> lists = theSearchParameterMap.get(patientSearchParam.getName()); - for (List list : lists) { - List toAdd = new ArrayList<>(); - for (IQueryParameterType paramVal : list) { - if (!paramVal.getMissing() && paramVal.getQueryParameterQualifier().equalsIgnoreCase("equals")){ - String valueAsQueryToken = paramVal.getValueAsQueryToken(myFhirContext); - Long pidOrThrowException = myIdHelperService.getPidOrThrowException(new IdDt(valueAsQueryToken)); - Set expandedIds= myMdmLinkExpandSvc.expandMdmBySourceResourcePid(pidOrThrowException); - ourLog.info("Expanded to resource ids: [{}]", String.join(",", expandedIds)); - toAdd.addAll(expandedIds.stream().map(StringParam::new).collect(Collectors.toList())); - } - } - list.addAll(toAdd); - } - } - } - return true; + @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED) + public void hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { + System.out.println("zoop"); + theSearchParameterMap.values().stream() + .flatMap(Collection::stream) + .filter(queryParam -> queryParam instanceof ReferenceParam) + .filter(referenceParam -> ((ReferenceParam) referenceParam).isMdmExpand()) + .map(untypedParam -> (ReferenceParam)untypedParam) + .forEach(mdmReferenceParam -> { + System.out.println("zoop"); + System.out.println(mdmReferenceParam.toString()); + }); } } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java index 6786f9ff002..10a03f5c765 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java @@ -307,6 +307,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { ourLog.debug("Registering new search {}", searchUuid); + // Interceptor call: STORAGE_PRESEARCH_REGISTERED + HookParams params = new HookParams() + .add(ICachedSearchDetails.class, search) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(SearchParameterMap.class, theParams); + JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); Class resourceTypeClass = myContext.getResourceDefinition(theResourceType).getImplementingClass(); final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass); sb.setFetchSize(mySyncSize); @@ -382,13 +389,15 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { private PersistedJpaSearchFirstPageBundleProvider submitSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, String theQueryString, RequestPartitionId theRequestPartitionId) { StopWatch w = new StopWatch(); Search search = new Search(); + //TODO GGG MOVE THIS POPULATE AND ALSO THE HOOK CALL HIGHER UP IN THE STACK. 
populateSearchEntity(theParams, theResourceType, theSearchUuid, theQueryString, search, theRequestPartitionId); // Interceptor call: STORAGE_PRESEARCH_REGISTERED HookParams params = new HookParams() .add(ICachedSearchDetails.class, search) .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(SearchParameterMap.class, theParams); JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); SearchTask task = new SearchTask(search, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java index 204b27e69f1..3ebb0dc6f4c 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.mdm.config; */ import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; import ca.uhn.fhir.jpa.mdm.svc.MdmSurvivorshipSvcImpl; import ca.uhn.fhir.mdm.api.IMdmControllerSvc; import ca.uhn.fhir.mdm.api.IMdmExpungeSvc; @@ -79,6 +80,11 @@ public class MdmConsumerConfig { return new MdmStorageInterceptor(); } + @Bean + MdmSearchExpandingInterceptorInterceptor myMdmSearchExpandingInterceptorInterceptor() { + return new MdmSearchExpandingInterceptorInterceptor(); + } + @Bean IMdmSurvivorshipService mdmSurvivorshipService() { return new MdmSurvivorshipSvcImpl(); } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java 
b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java index b3396377031..5d9360bb62d 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.mdm.interceptor; * #L% */ +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.interceptor.api.IInterceptorService; @@ -41,6 +42,8 @@ public class MdmSubmitterInterceptorLoader { @Autowired private IMdmStorageInterceptor myIMdmStorageInterceptor; @Autowired + private MdmSearchExpandingInterceptorInterceptor myMdmSearchExpandingInterceptorInterceptor; + @Autowired private IInterceptorService myInterceptorService; @Autowired private SubscriptionSubmitInterceptorLoader mySubscriptionSubmitInterceptorLoader; @@ -53,6 +56,7 @@ public class MdmSubmitterInterceptorLoader { myDaoConfig.addSupportedSubscriptionType(Subscription.SubscriptionChannelType.MESSAGE); myInterceptorService.registerInterceptor(myIMdmStorageInterceptor); + myInterceptorService.registerInterceptor(myMdmSearchExpandingInterceptorInterceptor); ourLog.info("MDM interceptor registered"); // We need to call SubscriptionSubmitInterceptorLoader.start() again in case there were no subscription types the first time it was called. 
mySubscriptionSubmitInterceptorLoader.start(); diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java index 3c1b28764fa..dae0782cccb 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java @@ -44,6 +44,7 @@ import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.ContactPoint; import org.hl7.fhir.r4.model.DateType; import org.hl7.fhir.r4.model.Medication; +import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Organization; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Practitioner; @@ -96,6 +97,8 @@ abstract public class BaseMdmR4Test extends BaseJpaR4Test { @Autowired protected IFhirResourceDao myPractitionerDao; @Autowired + protected IFhirResourceDao myObservationDao; + @Autowired protected MdmResourceMatcherSvc myMdmResourceMatcherSvc; @Autowired protected IMdmLinkDao myMdmLinkDao; diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java index 5ad4b9ebe84..051bf27c546 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java @@ -6,10 +6,12 @@ import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperConfig; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperR4; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.mdm.model.CanonicalEID; import ca.uhn.fhir.mdm.rules.config.MdmSettings; import ca.uhn.fhir.rest.api.server.IBundleProvider; import 
ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.server.TransactionLogMessages; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; import org.hl7.fhir.instance.model.api.IAnyResource; @@ -65,6 +67,13 @@ public class MdmStorageInterceptorIT extends BaseMdmR4Test { assertLinkCount(1); } + @Test + public void testSearchExpandingInterceptorWorks() { + SearchParameterMap subject = new SearchParameterMap("subject", new ReferenceParam("Patient/123").setMdmExpand(true)).setLoadSynchronous(false); + myObservationDao.search(subject); + } + + @Test public void testDeleteGoldenResourceDeletesLinks() throws InterruptedException { myMdmHelper.createWithLatch(buildPaulPatient()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java index d5c08620370..1716e099cad 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java @@ -68,7 +68,6 @@ public class TestJpaR4Config extends BaseJavaConfigR4 { retVal.setDriver(new org.h2.Driver()); retVal.setUrl("jdbc:h2:mem:testdb_r4"); -// retVal.setUrl("jdbc:h2:file:/home/tadgh/smile/hapi-fhir/testdb_r4.db"); retVal.setMaxWaitMillis(10000); retVal.setUsername(""); retVal.setPassword(""); From e9e20988c969096fb7645992e754389227ca563e Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 29 Mar 2021 10:49:58 -0400 Subject: [PATCH 05/61] Partial moving of presearch regsitered pointcut --- .../jpa/search/SearchCoordinatorSvcImpl.java | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java index 10a03f5c765..aa1d16493f6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java @@ -305,7 +305,10 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { public IBundleProvider registerSearch(final IFhirResourceDao theCallingDao, final SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { final String searchUuid = UUID.randomUUID().toString(); + final String queryString = theParams.toNormalizedQueryString(myContext); ourLog.debug("Registering new search {}", searchUuid); + Search search = new Search(); + populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId); // Interceptor call: STORAGE_PRESEARCH_REGISTERED HookParams params = new HookParams() @@ -334,7 +337,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { cacheStatus = SearchCacheStatusEnum.NOT_TRIED; } - final String queryString = theParams.toNormalizedQueryString(myContext); if (cacheStatus != SearchCacheStatusEnum.NOT_TRIED) { if (theParams.getEverythingMode() == null) { if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) { @@ -347,7 +349,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } } - PersistedJpaSearchFirstPageBundleProvider retVal = submitSearch(theCallingDao, theParams, theResourceType, theRequestDetails, searchUuid, sb, queryString, theRequestPartitionId); + PersistedJpaSearchFirstPageBundleProvider retVal = submitSearch(theCallingDao, theParams, theResourceType, theRequestDetails, searchUuid, sb, queryString, theRequestPartitionId, search); retVal.setCacheStatus(cacheStatus); return retVal; @@ 
-386,25 +388,25 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } @NotNull - private PersistedJpaSearchFirstPageBundleProvider submitSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, String theQueryString, RequestPartitionId theRequestPartitionId) { + private PersistedJpaSearchFirstPageBundleProvider submitSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, String theQueryString, RequestPartitionId theRequestPartitionId, Search theSearch) { StopWatch w = new StopWatch(); - Search search = new Search(); +// Search search = new Search(); //TODO GGG MOVE THIS POPULATE AND ALSO THE HOOK CALL HIGHER UP IN THE STACK. - populateSearchEntity(theParams, theResourceType, theSearchUuid, theQueryString, search, theRequestPartitionId); +// populateSearchEntity(theParams, theResourceType, theSearchUuid, theQueryString, search, theRequestPartitionId); - // Interceptor call: STORAGE_PRESEARCH_REGISTERED - HookParams params = new HookParams() - .add(ICachedSearchDetails.class, search) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(SearchParameterMap.class, theParams); - JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); +// Interceptor call: STORAGE_PRESEARCH_REGISTERED +// HookParams params = new HookParams() +// .add(ICachedSearchDetails.class, search) +// .add(RequestDetails.class, theRequestDetails) +// .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) +// .add(SearchParameterMap.class, theParams); +// JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); - SearchTask task = new 
SearchTask(search, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId); - myIdToSearchTask.put(search.getUuid(), task); + SearchTask task = new SearchTask(theSearch, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId); + myIdToSearchTask.put(theSearch.getUuid(), task); myExecutor.submit(task); - PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, search, task, theSb); + PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb); ourLog.debug("Search initial phase completed in {}ms", w.getMillis()); return retVal; From 5f42743f48e96df6854de1e8db7fe80574326ce4 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 29 Mar 2021 19:16:26 -0400 Subject: [PATCH 06/61] wip --- .../search/lastn/config/TestElasticsearchContainerHelper.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java index ba85d2ee77f..8bb1ed88b05 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java @@ -21,6 +21,4 @@ public class TestElasticsearchContainerHelper { return new ElasticsearchContainer(ELASTICSEARCH_IMAGE) .withStartupTimeout(Duration.of(300, SECONDS)); - } - } From 838ac890d31f3b5833ced1b872ad5a2e6df6a29d Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 29 Mar 2021 19:19:37 -0400 Subject: [PATCH 07/61] wip --- .../search/lastn/config/TestElasticsearchContainerHelper.java | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java index 8bb1ed88b05..ba85d2ee77f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java @@ -21,4 +21,6 @@ public class TestElasticsearchContainerHelper { return new ElasticsearchContainer(ELASTICSEARCH_IMAGE) .withStartupTimeout(Duration.of(300, SECONDS)); + } + } From a15ded71408607f541e38a6991132ef59c85b5b6 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 29 Mar 2021 21:12:03 -0400 Subject: [PATCH 08/61] Interceptor now correctly intercepts at the right point --- .../java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java | 9 +++++++++ .../java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java | 7 +++++-- .../MdmSearchExpandingInterceptorInterceptor.java | 10 ++++++---- .../r4/ResourceProviderCustomSearchParamR4Test.java | 2 ++ .../ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java | 9 +++++---- .../jpa/mdm/interceptor/MdmStorageInterceptorIT.java | 3 +-- 6 files changed, 28 insertions(+), 12 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java index 53814c503b7..653c229727b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java @@ -58,4 +58,13 @@ public interface IMdmLinkDao extends JpaRepository { Long getSourcePid(); } + @Query("SELECT ml.myGoldenResourcePid, ml.mySourcePid " + + "FROM MdmLink ml " + + "INNER JOIN MdmLink ml2 " + + "on ml.myGoldenResourcePid=ml2.myGoldenResourcePid " + + "WHERE 
ml2.mySourcePid=:sourcePid " + + "AND ml2.myMatchResult=:matchResult " + + "AND ml.myMatchResult=:matchResult") + List expandPidsBySourcePidAndMatchResult(@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java index a04e4c2f795..66af7f5a706 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkExpandSvc.java @@ -81,9 +81,12 @@ public class MdmLinkExpandSvc { */ public Set expandMdmBySourceResourcePid(Long theSourceResourcePid) { ourLog.debug("About to expand source resource with PID {}", theSourceResourcePid); - List> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); + List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); Set flattenedPids = new HashSet<>(); - goldenPidSourcePidTuples.forEach(flattenedPids::addAll); + goldenPidSourcePidTuples.forEach(tuple -> { + flattenedPids.add(tuple.getSourcePid()); + flattenedPids.add(tuple.getGoldenPid()); + }); Set resourceIds = myIdHelperService.translatePidsToFhirResourceIds(flattenedPids); ourLog.debug("Pid {} has been expanded to [{}]", theSourceResourcePid, String.join(",", resourceIds)); return resourceIds; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java index 04333b578a7..46052fc1d52 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java @@ -72,12 +72,14 @@ public class MdmSearchExpandingInterceptorInterceptor { System.out.println("zoop"); theSearchParameterMap.values().stream() .flatMap(Collection::stream) - .filter(queryParam -> queryParam instanceof ReferenceParam) - .filter(referenceParam -> ((ReferenceParam) referenceParam).isMdmExpand()) + .flatMap(Collection::stream) + .filter(param -> param instanceof ReferenceParam) .map(untypedParam -> (ReferenceParam)untypedParam) + .filter(ReferenceParam::isMdmExpand) .forEach(mdmReferenceParam -> { - System.out.println("zoop"); - System.out.println(mdmReferenceParam.toString()); + Set strings = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(mdmReferenceParam.getValue())); + System.out.println(String.join(",", strings)); + //TODO in AM, start here with a test that actually has an expansion to expand against. }); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java index 59b1d653154..3453eddc465 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java @@ -58,6 +58,8 @@ import java.util.stream.Collectors; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; diff --git 
a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java index cd3978133b8..ae034af6fa7 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.mdm.dao; +import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; @@ -78,14 +79,14 @@ public class MdmLinkDaoSvcTest extends BaseMdmR4Test { List expectedExpandedPids = mdmLinks.stream().map(MdmLink::getSourcePid).collect(Collectors.toList()); //SUT - List> lists = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(mdmLinks.get(0).getSourcePid(), MdmMatchResultEnum.MATCH); + List lists = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(mdmLinks.get(0).getSourcePid(), MdmMatchResultEnum.MATCH); assertThat(lists, hasSize(10)); lists.stream() - .forEach(pair -> { - assertThat(pair.get(0), is(equalTo(golden.getIdElement().getIdPartAsLong()))); - assertThat(pair.get(1), is(in(expectedExpandedPids))); + .forEach(tuple -> { + assertThat(tuple.getGoldenPid(), is(equalTo(golden.getIdElement().getIdPartAsLong()))); + assertThat(tuple.getSourcePid(), is(in(expectedExpandedPids))); }); } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java index 051bf27c546..c06f0db2c7f 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java @@ -69,11 +69,10 @@ public class MdmStorageInterceptorIT extends BaseMdmR4Test { @Test public 
void testSearchExpandingInterceptorWorks() { - SearchParameterMap subject = new SearchParameterMap("subject", new ReferenceParam("Patient/123").setMdmExpand(true)).setLoadSynchronous(false); + SearchParameterMap subject = new SearchParameterMap("subject", new ReferenceParam("Patient/123").setMdmExpand(true)).setLoadSynchronous(true); myObservationDao.search(subject); } - @Test public void testDeleteGoldenResourceDeletesLinks() throws InterruptedException { myMdmHelper.createWithLatch(buildPaulPatient()); From 419a5829ea5cb2b958c0911d61750a2005a6bc83 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 30 Mar 2021 12:39:37 -0400 Subject: [PATCH 09/61] This shockingly works --- .../ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java | 2 +- ...SearchExpandingInterceptorInterceptor.java | 58 ++- .../MdmSearchExpandingInterceptorIT.java | 348 ++++++++++++++++++ 3 files changed, 377 insertions(+), 31 deletions(-) create mode 100644 hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java index 653c229727b..16834facbe2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkDao.java @@ -58,7 +58,7 @@ public interface IMdmLinkDao extends JpaRepository { Long getSourcePid(); } - @Query("SELECT ml.myGoldenResourcePid, ml.mySourcePid " + + @Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid " + "FROM MdmLink ml " + "INNER JOIN MdmLink ml2 " + "on ml.myGoldenResourcePid=ml2.myGoldenResourcePid " + diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java index 46052fc1d52..d7fabc86763 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java @@ -20,35 +20,22 @@ package ca.uhn.fhir.jpa.interceptor; * #L% */ -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc; -import ca.uhn.fhir.jpa.search.helper.SearchParamHelper; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.mdm.log.Logs; -import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; -import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.util.ClasspathUtil; -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.IBaseConformance; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import static org.slf4j.LoggerFactory.getLogger; @@ -62,24 +49,35 @@ public class MdmSearchExpandingInterceptorInterceptor { @Autowired private MdmLinkExpandSvc myMdmLinkExpandSvc; - @Autowired - private FhirContext myFhirContext; - @Autowired - private IdHelperService myIdHelperService; @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED) - public void 
hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { - System.out.println("zoop"); - theSearchParameterMap.values().stream() - .flatMap(Collection::stream) - .flatMap(Collection::stream) - .filter(param -> param instanceof ReferenceParam) - .map(untypedParam -> (ReferenceParam)untypedParam) - .filter(ReferenceParam::isMdmExpand) - .forEach(mdmReferenceParam -> { - Set strings = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(mdmReferenceParam.getValue())); - System.out.println(String.join(",", strings)); - //TODO in AM, start here with a test that actually has an expansion to expand against. - }); + public void hook(SearchParameterMap theSearchParameterMap) { + for (List> andList : theSearchParameterMap.values()) { + for (List orList : andList) { + expandAnyReferenceParameters(orList); + } + } + } + + /** + * If a Parameter is a reference parameter, and it has been set to expand MDM, perform the expansion. + */ + private void expandAnyReferenceParameters(List orList) { + List toRemove = new ArrayList<>(); + List toAdd = new ArrayList<>(); + for (IQueryParameterType iQueryParameterType : orList) { + if (iQueryParameterType instanceof ReferenceParam) { + ReferenceParam refParam = (ReferenceParam) iQueryParameterType; + if (refParam.isMdmExpand()) { + Set strings = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue())); + if (!strings.isEmpty()) { + toRemove.add(refParam); + strings.stream().map(resourceId -> new ReferenceParam(refParam.getResourceType() + "/" + resourceId)).forEach(toAdd::add); + } + } + } + } + orList.removeAll(toRemove); + orList.addAll(toAdd); } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java new file mode 100644 index 00000000000..69a87e09de4 --- /dev/null +++ 
b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java @@ -0,0 +1,348 @@ +package ca.uhn.fhir.jpa.mdm.interceptor; + +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.dao.index.IdHelperService; +import ca.uhn.fhir.jpa.entity.MdmLink; +import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; +import ca.uhn.fhir.jpa.mdm.helper.MdmHelperConfig; +import ca.uhn.fhir.jpa.mdm.helper.MdmHelperR4; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.mdm.model.CanonicalEID; +import ca.uhn.fhir.mdm.rules.config.MdmSettings; +import ca.uhn.fhir.model.dstu2.resource.Observation; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.param.ReferenceOrListParam; +import ca.uhn.fhir.rest.param.ReferenceParam; +import ca.uhn.fhir.rest.param.StringParam; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.TransactionLogMessages; +import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; +import org.hl7.fhir.instance.model.api.IAnyResource; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Enumerations; +import org.hl7.fhir.r4.model.Medication; +import org.hl7.fhir.r4.model.Organization; +import org.hl7.fhir.r4.model.Patient; +import org.hl7.fhir.r4.model.SearchParameter; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.slf4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.ContextConfiguration; + +import java.util.Date; +import java.util.List; + +import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD; +import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED; +import static 
ca.uhn.fhir.mdm.api.MdmConstants.CODE_HAPI_MDM_MANAGED; +import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS; +import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_MDM_MANAGED; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; +import static org.slf4j.LoggerFactory.getLogger; + +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) +@ContextConfiguration(classes = {MdmHelperConfig.class}) +public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test { + + private static final Logger ourLog = getLogger(MdmSearchExpandingInterceptorIT.class); + + @RegisterExtension + @Autowired + public MdmHelperR4 myMdmHelper; + @Autowired + private IdHelperService myIdHelperService; + + @Test + public void testCreatePractitioner() throws InterruptedException { + MdmHelperR4.OutcomeAndLogMessageWrapper withLatch = myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + IIdType id = withLatch.getDaoMethodOutcome().getId(); + myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + assertLinkCount(4); + + + + SearchParameterMap searchParameterMap = new SearchParameterMap(); + searchParameterMap.setLoadSynchronous(true); + ReferenceOrListParam referenceOrListParam = new ReferenceOrListParam(); + referenceOrListParam.addOr(new ReferenceParam(id.toVersionless()).setMdmExpand(true)); + 
referenceOrListParam.addOr(new ReferenceParam(id.toVersionless())); + referenceOrListParam.addOr(new ReferenceParam(id.toVersionless())); + searchParameterMap.add(Observation.SP_SUBJECT, referenceOrListParam); + searchParameterMap.add(Observation.SP_CATEGORY, new TokenParam("test-1", "test-2")); + searchParameterMap.add(Observation.SP_ENCOUNTER, new ReferenceParam("Encounter/abc")); + + + myObservationDao.search(searchParameterMap); + + } + + @Test + public void testSearchExpandingInterceptorWorks() { + SearchParameterMap subject = new SearchParameterMap("subject", new ReferenceParam("Patient/123").setMdmExpand(true)).setLoadSynchronous(true); + myObservationDao.search(subject); + } + + @Test + public void testDeleteGoldenResourceDeletesLinks() throws InterruptedException { + myMdmHelper.createWithLatch(buildPaulPatient()); + assertLinkCount(1); + Patient sourcePatient = getOnlyGoldenPatient(); + myPatientDao.delete(sourcePatient.getIdElement()); + assertLinkCount(0); + } + + @Test + public void testCreatePatientWithMdmTagForbidden() throws InterruptedException { + //Creating a golden resource with the MDM-MANAGED tag should fail + Patient patient = new Patient(); + patient.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); + try { + myMdmHelper.doCreateResource(patient, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); + } + } + + @Test + public void testCreatePatientWithGoldenRecordTagForbidden() throws InterruptedException { + Patient patient = new Patient(); + patient.getMeta().addTag(SYSTEM_GOLDEN_RECORD_STATUS, CODE_GOLDEN_RECORD, "Golden Record"); + try { + myMdmHelper.doCreateResource(patient, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); + } + } + + @Test + public void 
testCreateMedicationWithGoldenRecordRedirectTagForbidden() throws InterruptedException { + Medication medication = new Medication(); + medication.getMeta().addTag(SYSTEM_GOLDEN_RECORD_STATUS, CODE_GOLDEN_RECORD_REDIRECTED, "Golden Record"); + try { + myMdmHelper.doCreateResource(medication, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); + } + } + + @Test + public void testCreatingGoldenResourceWithInsufficentMDMAttributesIsNotMDMProcessed() throws InterruptedException { + myMdmHelper.doCreateResource(new Patient(), true); + assertLinkCount(0); + } + + @Test + public void testCreatingPatientWithOneOrMoreMatchingAttributesIsMDMProcessed() throws InterruptedException { + myMdmHelper.createWithLatch(buildPaulPatient()); + assertLinkCount(1); + } + + @Test + public void testCreateOrganizationWithMdmTagForbidden() throws InterruptedException { + //Creating a organization with the MDM-MANAGED tag should fail + Organization organization = new Organization(); + organization.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); + try { + myMdmHelper.doCreateResource(organization, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); + } + } + + @Test + public void testUpdateOrganizationWithMdmTagForbidden() throws InterruptedException { + //Creating a organization with the MDM-MANAGED tag should fail + Organization organization = new Organization(); + myMdmHelper.doCreateResource(organization, true); + organization.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); + try { + myMdmHelper.doUpdateResource(organization, true); + fail(); + } catch (ForbiddenOperationException e) { + assertEquals("The HAPI-MDM tag on a resource may not be changed once created.", e.getMessage()); + } + } 
+ + @Test + public void testGoldenResourceRecordsManagedByMdmAllShareSameTag() throws InterruptedException { + myMdmHelper.createWithLatch(buildJanePatient()); + myMdmHelper.createWithLatch(buildPaulPatient()); + + //TODO GGG MDM: this test is out of date, since we now are using golden record Patients + IBundleProvider search = myPatientDao.search(buildGoldenResourceSearchParameterMap()); + List resources = search.getResources(0, search.size()); + + for (IBaseResource r : resources) { + assertThat(r.getMeta().getTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED), is(notNullValue())); + } + } + + @Test + public void testNonMdmManagedGoldenResourceCannotHaveMdmManagedTagAddedToThem() { + // GoldenResource created manually. + Patient patient = new Patient(); + DaoMethodOutcome daoMethodOutcome = myMdmHelper.doCreateResource(patient, true); + assertNotNull(daoMethodOutcome.getId()); + + //Updating that patient to set them as MDM managed is not allowed. + patient.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); + try { + myMdmHelper.doUpdateResource(patient, true); + fail(); + } catch (ForbiddenOperationException e) { + assertEquals("The HAPI-MDM tag on a resource may not be changed once created.", e.getMessage()); + } + } + + @Test + public void testMdmManagedGoldenResourceCannotBeModifiedByGoldenResourceUpdateRequest() throws InterruptedException { + // When MDM is enabled, only the MDM system is allowed to modify GoldenResource links of GoldenResources with the MDM-MANAGED tag. + Patient patient = new Patient(); + IIdType patientId = myMdmHelper.createWithLatch(buildPaulPatient()).getDaoMethodOutcome().getId().toUnqualifiedVersionless(); + + patient.setId(patientId); + + // Updating a Golden Resource Patient who was created via MDM should fail. 
+ MdmLink mdmLink = myMdmLinkDaoSvc.getMatchedLinkForSourcePid(myIdHelperService.getPidOrNull(patient)).get(); + Long sourcePatientPid = mdmLink.getGoldenResourcePid(); + Patient goldenResourcePatient = (Patient) myPatientDao.readByPid(new ResourcePersistentId(sourcePatientPid)); + goldenResourcePatient.setGender(Enumerations.AdministrativeGender.MALE); + try { + myMdmHelper.doUpdateResource(goldenResourcePatient, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); + } + } + + @Test + public void testMdmPointcutReceivesTransactionLogMessages() throws InterruptedException { + MdmHelperR4.OutcomeAndLogMessageWrapper wrapper = myMdmHelper.createWithLatch(buildJanePatient()); + + TransactionLogMessages mdmTransactionLogMessages = wrapper.getLogMessages(); + + //There is no TransactionGuid here as there is no TransactionLog in this context. + assertThat(mdmTransactionLogMessages.getTransactionGuid(), is(nullValue())); + + List messages = mdmTransactionLogMessages.getValues(); + assertThat(messages.isEmpty(), is(false)); + } + + @Test + public void testWhenASingularPatientUpdatesExternalEidThatGoldenResourceEidIsUpdated() throws InterruptedException { + Patient jane = addExternalEID(buildJanePatient(), "some_eid"); + MdmHelperR4.OutcomeAndLogMessageWrapper latch = myMdmHelper.createWithLatch(jane); + jane.setId(latch.getDaoMethodOutcome().getId()); + clearExternalEIDs(jane); + jane = addExternalEID(jane, "some_new_eid"); + + MdmHelperR4.OutcomeAndLogMessageWrapper outcomeWrapper = myMdmHelper.updateWithLatch(jane); + IAnyResource patient = getGoldenResourceFromTargetResource(jane); + List externalEids = myEIDHelper.getExternalEid(patient); + assertThat(externalEids, hasSize(1)); + assertThat("some_new_eid", is(equalTo(externalEids.get(0).getValue()))); + } + + @Test + public void testWhenEidUpdatesAreDisabledForbidsUpdatesToEidsOnTargets() throws 
InterruptedException { + setPreventEidUpdates(true); + Patient jane = addExternalEID(buildJanePatient(), "some_eid"); + MdmHelperR4.OutcomeAndLogMessageWrapper latch = myMdmHelper.createWithLatch(jane); + jane.setId(latch.getDaoMethodOutcome().getId()); + clearExternalEIDs(jane); + jane = addExternalEID(jane, "some_new_eid"); + try { + myMdmHelper.doUpdateResource(jane, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), is(equalTo("While running with EID updates disabled, EIDs may not be updated on source resources"))); + } + setPreventEidUpdates(false); + } + + @Test + public void testWhenMultipleEidsAreDisabledThatTheInterceptorRejectsCreatesWithThem() { + setPreventMultipleEids(true); + Patient patient = buildJanePatient(); + addExternalEID(patient, "123"); + addExternalEID(patient, "456"); + try { + myMdmHelper.doCreateResource(patient, true); + fail(); + } catch (ForbiddenOperationException e) { + assertThat(e.getMessage(), is(equalTo("While running with multiple EIDs disabled, source resources may have at most one EID."))); + } + + setPreventMultipleEids(false); + } + + @Test + public void testInterceptorHandlesNonMdmResources() { + setPreventEidUpdates(true); + + //Create some arbitrary resource. 
+ SearchParameter fooSp = new SearchParameter(); + fooSp.setCode("foo"); + fooSp.addBase("Bundle"); + fooSp.setType(Enumerations.SearchParamType.REFERENCE); + fooSp.setTitle("FOO SP"); + fooSp.setExpression("Bundle.entry[0].resource.as(Composition).encounter"); + fooSp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); + fooSp.setStatus(Enumerations.PublicationStatus.ACTIVE); + + myMdmHelper.doCreateResource(fooSp, true); + fooSp.setXpathUsage(SearchParameter.XPathUsageType.PHONETIC); + myMdmHelper.doUpdateResource(fooSp, true); + } + + @Test + public void testPatientsWithNoEIDCanBeUpdated() throws InterruptedException { + setPreventEidUpdates(true); + Patient p = buildPaulPatient(); + MdmHelperR4.OutcomeAndLogMessageWrapper wrapper = myMdmHelper.createWithLatch(p); + + p.setId(wrapper.getDaoMethodOutcome().getId()); + p.setBirthDate(new Date()); + myMdmHelper.updateWithLatch(p); + setPreventEidUpdates(false); + } + + @Test + public void testPatientsCanHaveEIDAddedInStrictMode() throws InterruptedException { + setPreventEidUpdates(true); + Patient p = buildPaulPatient(); + MdmHelperR4.OutcomeAndLogMessageWrapper messageWrapper = myMdmHelper.createWithLatch(p); + p.setId(messageWrapper.getDaoMethodOutcome().getId()); + addExternalEID(p, "external eid"); + myMdmHelper.updateWithLatch(p); + setPreventEidUpdates(false); + } + + private void setPreventEidUpdates(boolean thePrevent) { + ((MdmSettings) myMdmSettings).setPreventEidUpdates(thePrevent); + } + + private void setPreventMultipleEids(boolean thePrevent) { + ((MdmSettings) myMdmSettings).setPreventMultipleEids(thePrevent); + } + +} From 02e65141768bfe614366e17599225007a86bd576 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 30 Mar 2021 12:57:09 -0400 Subject: [PATCH 10/61] Add modelconfig toggle --- .../fhir/jpa/model/entity/ModelConfig.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java 
b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java index 9a70e967165..b327ffec14b 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java @@ -99,6 +99,7 @@ public class ModelConfig { private Map> myTypeToAutoVersionReferenceAtPaths = Collections.emptyMap(); private boolean myRespectVersionsForSearchIncludes; private boolean myIndexOnContainedResources = false; + private boolean myAllowMdmExpansion = false; /** * Constructor @@ -159,6 +160,24 @@ public class ModelConfig { return myAllowContainsSearches; } + /** + * If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters. + * This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier + * will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1 + * is MDM-matched to Patient/2 and you execute the search: + * Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2. + *

    + * Default is false + *

    + * @since 5.4.0 + */ + public boolean isAllowMdmExpansion() { + return myAllowMdmExpansion; + } + public void setAllowMdmExpansion(boolean theAllowMdmExpansion) { + myAllowMdmExpansion = theAllowMdmExpansion; + } + /** * If enabled, the server will support the use of :contains searches, * which are helpful but can have adverse effects on performance. From a044e8c4eb16fccc520c81dbd36316777af45feb Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 30 Mar 2021 12:59:29 -0400 Subject: [PATCH 11/61] Add a delegator in daoconfig --- .../ca/uhn/fhir/jpa/api/config/DaoConfig.java | 30 +++++++++++++++++++ .../fhir/jpa/model/entity/ModelConfig.java | 12 ++++++++ 2 files changed, 42 insertions(+) diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java index 8c0f27d6a8b..6353d0a55b9 100644 --- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java +++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java @@ -1716,6 +1716,36 @@ public class DaoConfig { this.myModelConfig.setAllowContainsSearches(theAllowContainsSearches); } + /** + * If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters. + * This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier + * will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1 + * is MDM-matched to Patient/2 and you execute the search: + * Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2. + *

    + * Default is false + *

    + * @since 5.4.0 + */ + public boolean isAllowMdmExpansion() { + return myModelConfig.isAllowMdmExpansion(); + } + + /** + * If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters. + * This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier + * will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1 + * is MDM-matched to Patient/2 and you execute the search: + * Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2. + *

    + * Default is false + *

    + * @since 5.4.0 + */ + public void setAllowMdmExpansion(boolean theAllowMdmExpansion) { + myModelConfig.setAllowMdmExpansion(theAllowMdmExpansion); + } + /** * This setting may be used to advise the server that any references found in * resources that have any of the base URLs given here will be replaced with diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java index b327ffec14b..4101d2504b7 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java @@ -174,6 +174,18 @@ public class ModelConfig { public boolean isAllowMdmExpansion() { return myAllowMdmExpansion; } + + /** + * If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters. + * This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier + * will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1 + * is MDM-matched to Patient/2 and you execute the search: + * Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2. + *

    + * Default is false + *

    + * @since 5.4.0 + */ public void setAllowMdmExpansion(boolean theAllowMdmExpansion) { myAllowMdmExpansion = theAllowMdmExpansion; } From 8b69d161bd7ede67a879b08246e44e654838ae2d Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 30 Mar 2021 17:12:55 -0400 Subject: [PATCH 12/61] Update docs --- .../uhn/fhir/rest/param/ReferenceParam.java | 2 + .../fhir/docs/server_jpa_mdm/mdm_expansion.md | 28 ++ .../ca/uhn/fhir/jpa/config/BaseConfig.java | 7 +- ...ava => MdmSearchExpandingInterceptor.java} | 23 +- .../jpa/mdm/config/MdmConsumerConfig.java | 6 +- .../MdmSubmitterInterceptorLoader.java | 4 +- .../MdmSearchExpandingInterceptorIT.java | 338 ++---------------- 7 files changed, 92 insertions(+), 316 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/{MdmSearchExpandingInterceptorInterceptor.java => MdmSearchExpandingInterceptor.java} (73%) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java index 583edaa6731..32cdaf427d0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java @@ -127,7 +127,9 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ myMdmExpand = true; theQualifier = ""; //TODO GGG i probably have to deal with chaining here? like refusing the mdm qualifier if i can detect its chained? 
+ //TODO GGG just throw an error if they try to chain } + String q = theQualifier; if (isNotBlank(q)) { if (q.startsWith(":")) { diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md new file mode 100644 index 00000000000..c4766cc71d1 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -0,0 +1,28 @@ +# MDM Expansion + +Once you have MDM enabled, and you have many linked resources, it can be useful to search across all resourcess. Let's say you have the following MDM links in your database: +``` +Patient/1 --> Patient/3 +Patient/2 --> Patient/3 +``` +This indicates that both Patient/1 and Patient/2 are MDM-mathed to the same golden resource (Patient/3). +What if you want to get all observations from Patient/1, but also include any observations from all of their linked resources. You could do this by first querying the [$mdm-query-links]() endpoint, and then making a subsequent call like the following +```http request +GET http://example.com:8000/Observation?subject=Patient/1,Patient/2,Patient/3 +``` + +But HAPI-FHIR allows a shorthand for this, by means of a Search Parameter qualifier, as follows: +```http request +GET http://example.com:8000/Observation?subject:mdm=Patient/1 +``` + +This `:mdm` parameter qualifier instructs an interceptor in HAPI fhir to expand the set of resources included in the search by their MDM-matched resources. The two above HTTP requests will return the same result. + +## Enabling MDM Expansion + +On top of needing to instantiate an MDM module, you must enable in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property. + +
    +It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. +
    + diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index 4d40bf04fde..3432af617ce 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -61,7 +61,7 @@ import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.graphql.JpaStorageServices; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; -import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder; @@ -476,8 +476,9 @@ public abstract class BaseConfig { } @Bean - public MdmSearchExpandingInterceptorInterceptor mdmSearchExpandingInterceptorInterceptor() { - return new MdmSearchExpandingInterceptorInterceptor(); + @Lazy + public MdmSearchExpandingInterceptor mdmSearchExpandingInterceptorInterceptor() { + return new MdmSearchExpandingInterceptor(); } @Bean diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java similarity index 73% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java index d7fabc86763..63f3d3574bc 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptorInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.interceptor; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.mdm.log.Logs; @@ -30,6 +31,7 @@ import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; +import joptsimple.internal.Strings; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; @@ -44,17 +46,22 @@ import static org.slf4j.LoggerFactory.getLogger; * by the HAPI FHIR Server with a static hard-coded resource. 
*/ @Interceptor -public class MdmSearchExpandingInterceptorInterceptor { +public class MdmSearchExpandingInterceptor { private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); @Autowired private MdmLinkExpandSvc myMdmLinkExpandSvc; + @Autowired + private DaoConfig myDaoConfig; + @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED) public void hook(SearchParameterMap theSearchParameterMap) { - for (List> andList : theSearchParameterMap.values()) { - for (List orList : andList) { - expandAnyReferenceParameters(orList); + if (myDaoConfig.isAllowMdmExpansion()) { + for (List> andList : theSearchParameterMap.values()) { + for (List orList : andList) { + expandAnyReferenceParameters(orList); + } } } } @@ -69,10 +76,12 @@ public class MdmSearchExpandingInterceptorInterceptor { if (iQueryParameterType instanceof ReferenceParam) { ReferenceParam refParam = (ReferenceParam) iQueryParameterType; if (refParam.isMdmExpand()) { - Set strings = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue())); - if (!strings.isEmpty()) { + ourLog.debug("Found a reference parameter to expand: {}", refParam.toString()); + Set expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue())); + if (!expandedResourceIds.isEmpty()) { + ourLog.debug("Parameter has been expanded to: {}", String.join(", ", expandedResourceIds)); toRemove.add(refParam); - strings.stream().map(resourceId -> new ReferenceParam(refParam.getResourceType() + "/" + resourceId)).forEach(toAdd::add); + expandedResourceIds.stream().map(resourceId -> new ReferenceParam(refParam.getResourceType() + "/" + resourceId)).forEach(toAdd::add); } } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java index 3ebb0dc6f4c..03d4aacd491 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java +++ 
b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.mdm.config; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.jpa.mdm.svc.MdmSurvivorshipSvcImpl; import ca.uhn.fhir.mdm.api.IMdmControllerSvc; import ca.uhn.fhir.mdm.api.IMdmExpungeSvc; @@ -81,8 +81,8 @@ public class MdmConsumerConfig { } @Bean - MdmSearchExpandingInterceptorInterceptor myMdmSearchExpandingInterceptorInterceptor() { - return new MdmSearchExpandingInterceptorInterceptor(); + MdmSearchExpandingInterceptor myMdmSearchExpandingInterceptorInterceptor() { + return new MdmSearchExpandingInterceptor(); } @Bean diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java index 5d9360bb62d..912d39331ea 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.mdm.interceptor; * #L% */ -import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor; +import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.interceptor.api.IInterceptorService; @@ -42,7 +42,7 @@ public class MdmSubmitterInterceptorLoader { @Autowired private IMdmStorageInterceptor myIMdmStorageInterceptor; @Autowired - private MdmSearchExpandingInterceptorInterceptor myMdmSearchExpandingInterceptorInterceptor; + private MdmSearchExpandingInterceptor myMdmSearchExpandingInterceptorInterceptor; @Autowired private 
IInterceptorService myInterceptorService; @Autowired diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java index 69a87e09de4..f1f5eeb2216 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java @@ -1,31 +1,17 @@ package ca.uhn.fhir.jpa.mdm.interceptor; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; -import ca.uhn.fhir.jpa.dao.index.IdHelperService; -import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperConfig; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperR4; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.mdm.model.CanonicalEID; -import ca.uhn.fhir.mdm.rules.config.MdmSettings; -import ca.uhn.fhir.model.dstu2.resource.Observation; import ca.uhn.fhir.rest.api.server.IBundleProvider; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.ReferenceOrListParam; import ca.uhn.fhir.rest.param.ReferenceParam; -import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.rest.param.TokenParam; -import ca.uhn.fhir.rest.server.TransactionLogMessages; -import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; -import org.hl7.fhir.instance.model.api.IAnyResource; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.model.Enumerations; -import org.hl7.fhir.r4.model.Medication; -import org.hl7.fhir.r4.model.Organization; -import org.hl7.fhir.r4.model.Patient; -import org.hl7.fhir.r4.model.SearchParameter; +import org.hl7.fhir.r4.model.CodeableConcept; +import 
org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.Reference; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.slf4j.Logger; @@ -33,24 +19,9 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; -import java.util.Date; -import java.util.List; - -import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD; -import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED; -import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_HAPI_MDM_MANAGED; -import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS; -import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_MDM_MANAGED; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.fail; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.slf4j.LoggerFactory.getLogger; @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) @@ -63,286 +34,51 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test { @Autowired public MdmHelperR4 myMdmHelper; @Autowired - private IdHelperService myIdHelperService; + private DaoConfig myDaoConfig; @Test - public void testCreatePractitioner() throws InterruptedException { + public void testReferenceExpansionWorks() throws InterruptedException { + myDaoConfig.setAllowMdmExpansion(false); MdmHelperR4.OutcomeAndLogMessageWrapper 
withLatch = myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); - IIdType id = withLatch.getDaoMethodOutcome().getId(); - myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); - myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); - myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + MdmHelperR4.OutcomeAndLogMessageWrapper withLatch1 = myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + MdmHelperR4.OutcomeAndLogMessageWrapper withLatch2 = myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + MdmHelperR4.OutcomeAndLogMessageWrapper withLatch3 = myMdmHelper.createWithLatch(addExternalEID(buildJanePatient(), "123")); + assertLinkCount(4); + String id = withLatch.getDaoMethodOutcome().getId().getIdPart(); + String id1 = withLatch1.getDaoMethodOutcome().getId().getIdPart(); + String id2 = withLatch2.getDaoMethodOutcome().getId().getIdPart(); + String id3 = withLatch3.getDaoMethodOutcome().getId().getIdPart(); + //Create an Observation for each Patient + createObservationWithSubject(id); + createObservationWithSubject(id1); + createObservationWithSubject(id2); + createObservationWithSubject(id3); SearchParameterMap searchParameterMap = new SearchParameterMap(); searchParameterMap.setLoadSynchronous(true); ReferenceOrListParam referenceOrListParam = new ReferenceOrListParam(); - referenceOrListParam.addOr(new ReferenceParam(id.toVersionless()).setMdmExpand(true)); - referenceOrListParam.addOr(new ReferenceParam(id.toVersionless())); - referenceOrListParam.addOr(new ReferenceParam(id.toVersionless())); + referenceOrListParam.addOr(new ReferenceParam("Patient/" + id).setMdmExpand(true)); searchParameterMap.add(Observation.SP_SUBJECT, referenceOrListParam); - searchParameterMap.add(Observation.SP_CATEGORY, new TokenParam("test-1", "test-2")); - searchParameterMap.add(Observation.SP_ENCOUNTER, new ReferenceParam("Encounter/abc")); + //With MDM Expansion 
disabled, this should return 1 result. + IBundleProvider search = myObservationDao.search(searchParameterMap); + assertThat(search.size(), is(equalTo(1))); - myObservationDao.search(searchParameterMap); - + //Once MDM Expansion is allowed, this should now return 4 resources. + myDaoConfig.setAllowMdmExpansion(true); + search = myObservationDao.search(searchParameterMap); + assertThat(search.size(), is(equalTo(4))); } - @Test - public void testSearchExpandingInterceptorWorks() { - SearchParameterMap subject = new SearchParameterMap("subject", new ReferenceParam("Patient/123").setMdmExpand(true)).setLoadSynchronous(true); - myObservationDao.search(subject); + private Observation createObservationWithSubject(String thePatientId) { + Observation observation = new Observation(); + observation.setSubject(new Reference("Patient/" + thePatientId)); + observation.setCode(new CodeableConcept().setText("Made for Patient/" + thePatientId)); + DaoMethodOutcome daoMethodOutcome = myObservationDao.create(observation); + return (Observation) daoMethodOutcome.getResource(); + } - - @Test - public void testDeleteGoldenResourceDeletesLinks() throws InterruptedException { - myMdmHelper.createWithLatch(buildPaulPatient()); - assertLinkCount(1); - Patient sourcePatient = getOnlyGoldenPatient(); - myPatientDao.delete(sourcePatient.getIdElement()); - assertLinkCount(0); - } - - @Test - public void testCreatePatientWithMdmTagForbidden() throws InterruptedException { - //Creating a golden resource with the MDM-MANAGED tag should fail - Patient patient = new Patient(); - patient.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); - try { - myMdmHelper.doCreateResource(patient, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); - } - } - - @Test - public void testCreatePatientWithGoldenRecordTagForbidden() throws InterruptedException { - Patient
patient = new Patient(); - patient.getMeta().addTag(SYSTEM_GOLDEN_RECORD_STATUS, CODE_GOLDEN_RECORD, "Golden Record"); - try { - myMdmHelper.doCreateResource(patient, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); - } - } - - @Test - public void testCreateMedicationWithGoldenRecordRedirectTagForbidden() throws InterruptedException { - Medication medication = new Medication(); - medication.getMeta().addTag(SYSTEM_GOLDEN_RECORD_STATUS, CODE_GOLDEN_RECORD_REDIRECTED, "Golden Record"); - try { - myMdmHelper.doCreateResource(medication, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); - } - } - - @Test - public void testCreatingGoldenResourceWithInsufficentMDMAttributesIsNotMDMProcessed() throws InterruptedException { - myMdmHelper.doCreateResource(new Patient(), true); - assertLinkCount(0); - } - - @Test - public void testCreatingPatientWithOneOrMoreMatchingAttributesIsMDMProcessed() throws InterruptedException { - myMdmHelper.createWithLatch(buildPaulPatient()); - assertLinkCount(1); - } - - @Test - public void testCreateOrganizationWithMdmTagForbidden() throws InterruptedException { - //Creating a organization with the MDM-MANAGED tag should fail - Organization organization = new Organization(); - organization.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); - try { - myMdmHelper.doCreateResource(organization, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); - } - } - - @Test - public void testUpdateOrganizationWithMdmTagForbidden() throws InterruptedException { - //Creating a organization with the MDM-MANAGED tag should fail - Organization organization = new Organization(); - 
myMdmHelper.doCreateResource(organization, true); - organization.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); - try { - myMdmHelper.doUpdateResource(organization, true); - fail(); - } catch (ForbiddenOperationException e) { - assertEquals("The HAPI-MDM tag on a resource may not be changed once created.", e.getMessage()); - } - } - - @Test - public void testGoldenResourceRecordsManagedByMdmAllShareSameTag() throws InterruptedException { - myMdmHelper.createWithLatch(buildJanePatient()); - myMdmHelper.createWithLatch(buildPaulPatient()); - - //TODO GGG MDM: this test is out of date, since we now are using golden record Patients - IBundleProvider search = myPatientDao.search(buildGoldenResourceSearchParameterMap()); - List resources = search.getResources(0, search.size()); - - for (IBaseResource r : resources) { - assertThat(r.getMeta().getTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED), is(notNullValue())); - } - } - - @Test - public void testNonMdmManagedGoldenResourceCannotHaveMdmManagedTagAddedToThem() { - // GoldenResource created manually. - Patient patient = new Patient(); - DaoMethodOutcome daoMethodOutcome = myMdmHelper.doCreateResource(patient, true); - assertNotNull(daoMethodOutcome.getId()); - - //Updating that patient to set them as MDM managed is not allowed. - patient.getMeta().addTag(SYSTEM_MDM_MANAGED, CODE_HAPI_MDM_MANAGED, "User is managed by MDM"); - try { - myMdmHelper.doUpdateResource(patient, true); - fail(); - } catch (ForbiddenOperationException e) { - assertEquals("The HAPI-MDM tag on a resource may not be changed once created.", e.getMessage()); - } - } - - @Test - public void testMdmManagedGoldenResourceCannotBeModifiedByGoldenResourceUpdateRequest() throws InterruptedException { - // When MDM is enabled, only the MDM system is allowed to modify GoldenResource links of GoldenResources with the MDM-MANAGED tag. 
- Patient patient = new Patient(); - IIdType patientId = myMdmHelper.createWithLatch(buildPaulPatient()).getDaoMethodOutcome().getId().toUnqualifiedVersionless(); - - patient.setId(patientId); - - // Updating a Golden Resource Patient who was created via MDM should fail. - MdmLink mdmLink = myMdmLinkDaoSvc.getMatchedLinkForSourcePid(myIdHelperService.getPidOrNull(patient)).get(); - Long sourcePatientPid = mdmLink.getGoldenResourcePid(); - Patient goldenResourcePatient = (Patient) myPatientDao.readByPid(new ResourcePersistentId(sourcePatientPid)); - goldenResourcePatient.setGender(Enumerations.AdministrativeGender.MALE); - try { - myMdmHelper.doUpdateResource(goldenResourcePatient, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), startsWith("Cannot create or modify Resources that are managed by MDM.")); - } - } - - @Test - public void testMdmPointcutReceivesTransactionLogMessages() throws InterruptedException { - MdmHelperR4.OutcomeAndLogMessageWrapper wrapper = myMdmHelper.createWithLatch(buildJanePatient()); - - TransactionLogMessages mdmTransactionLogMessages = wrapper.getLogMessages(); - - //There is no TransactionGuid here as there is no TransactionLog in this context. 
- assertThat(mdmTransactionLogMessages.getTransactionGuid(), is(nullValue())); - - List messages = mdmTransactionLogMessages.getValues(); - assertThat(messages.isEmpty(), is(false)); - } - - @Test - public void testWhenASingularPatientUpdatesExternalEidThatGoldenResourceEidIsUpdated() throws InterruptedException { - Patient jane = addExternalEID(buildJanePatient(), "some_eid"); - MdmHelperR4.OutcomeAndLogMessageWrapper latch = myMdmHelper.createWithLatch(jane); - jane.setId(latch.getDaoMethodOutcome().getId()); - clearExternalEIDs(jane); - jane = addExternalEID(jane, "some_new_eid"); - - MdmHelperR4.OutcomeAndLogMessageWrapper outcomeWrapper = myMdmHelper.updateWithLatch(jane); - IAnyResource patient = getGoldenResourceFromTargetResource(jane); - List externalEids = myEIDHelper.getExternalEid(patient); - assertThat(externalEids, hasSize(1)); - assertThat("some_new_eid", is(equalTo(externalEids.get(0).getValue()))); - } - - @Test - public void testWhenEidUpdatesAreDisabledForbidsUpdatesToEidsOnTargets() throws InterruptedException { - setPreventEidUpdates(true); - Patient jane = addExternalEID(buildJanePatient(), "some_eid"); - MdmHelperR4.OutcomeAndLogMessageWrapper latch = myMdmHelper.createWithLatch(jane); - jane.setId(latch.getDaoMethodOutcome().getId()); - clearExternalEIDs(jane); - jane = addExternalEID(jane, "some_new_eid"); - try { - myMdmHelper.doUpdateResource(jane, true); - fail(); - } catch (ForbiddenOperationException e) { - assertThat(e.getMessage(), is(equalTo("While running with EID updates disabled, EIDs may not be updated on source resources"))); - } - setPreventEidUpdates(false); - } - - @Test - public void testWhenMultipleEidsAreDisabledThatTheInterceptorRejectsCreatesWithThem() { - setPreventMultipleEids(true); - Patient patient = buildJanePatient(); - addExternalEID(patient, "123"); - addExternalEID(patient, "456"); - try { - myMdmHelper.doCreateResource(patient, true); - fail(); - } catch (ForbiddenOperationException e) { - 
assertThat(e.getMessage(), is(equalTo("While running with multiple EIDs disabled, source resources may have at most one EID."))); - } - - setPreventMultipleEids(false); - } - - @Test - public void testInterceptorHandlesNonMdmResources() { - setPreventEidUpdates(true); - - //Create some arbitrary resource. - SearchParameter fooSp = new SearchParameter(); - fooSp.setCode("foo"); - fooSp.addBase("Bundle"); - fooSp.setType(Enumerations.SearchParamType.REFERENCE); - fooSp.setTitle("FOO SP"); - fooSp.setExpression("Bundle.entry[0].resource.as(Composition).encounter"); - fooSp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); - fooSp.setStatus(Enumerations.PublicationStatus.ACTIVE); - - myMdmHelper.doCreateResource(fooSp, true); - fooSp.setXpathUsage(SearchParameter.XPathUsageType.PHONETIC); - myMdmHelper.doUpdateResource(fooSp, true); - } - - @Test - public void testPatientsWithNoEIDCanBeUpdated() throws InterruptedException { - setPreventEidUpdates(true); - Patient p = buildPaulPatient(); - MdmHelperR4.OutcomeAndLogMessageWrapper wrapper = myMdmHelper.createWithLatch(p); - - p.setId(wrapper.getDaoMethodOutcome().getId()); - p.setBirthDate(new Date()); - myMdmHelper.updateWithLatch(p); - setPreventEidUpdates(false); - } - - @Test - public void testPatientsCanHaveEIDAddedInStrictMode() throws InterruptedException { - setPreventEidUpdates(true); - Patient p = buildPaulPatient(); - MdmHelperR4.OutcomeAndLogMessageWrapper messageWrapper = myMdmHelper.createWithLatch(p); - p.setId(messageWrapper.getDaoMethodOutcome().getId()); - addExternalEID(p, "external eid"); - myMdmHelper.updateWithLatch(p); - setPreventEidUpdates(false); - } - - private void setPreventEidUpdates(boolean thePrevent) { - ((MdmSettings) myMdmSettings).setPreventEidUpdates(thePrevent); - } - - private void setPreventMultipleEids(boolean thePrevent) { - ((MdmSettings) myMdmSettings).setPreventMultipleEids(thePrevent); - } - } From 007ce8fc6fc7370e26f1b8cb54dc752fd5d81a04 Mon Sep 17 00:00:00 2001 From: 
Tadgh Date: Tue, 30 Mar 2021 23:40:20 -0400 Subject: [PATCH 13/61] add a test for the case where there are no mdm links --- .../MdmSearchExpandingInterceptorIT.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java index f1f5eeb2216..58d3abf4afa 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSearchExpandingInterceptorIT.java @@ -6,11 +6,13 @@ import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperConfig; import ca.uhn.fhir.jpa.mdm.helper.MdmHelperR4; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.mdm.api.MdmConstants; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.ReferenceOrListParam; import ca.uhn.fhir.rest.param.ReferenceParam; import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Reference; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -73,6 +75,22 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test { assertThat(search.size(), is(equalTo(4))); } + @Test + public void testReferenceExpansionQuietlyFailsOnMissingMdmMatches() { + myDaoConfig.setAllowMdmExpansion(true); + Patient patient = buildJanePatient(); + patient.getMeta().addTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED, "Don't MDM on me!"); + DaoMethodOutcome daoMethodOutcome = myMdmHelper.doCreateResource(patient, true); + String id = daoMethodOutcome.getId().getIdPart(); + createObservationWithSubject(id); + + //Even though the user has NO mdm
links, that should not cause a request failure. + SearchParameterMap map = new SearchParameterMap(); + map.add(Observation.SP_SUBJECT, new ReferenceParam("Patient/" + id).setMdmExpand(true)); + IBundleProvider search = myObservationDao.search(map); + assertThat(search.size(), is(equalTo(1))); + } + private Observation createObservationWithSubject(String thePatientId) { Observation observation = new Observation(); observation.setSubject(new Reference("Patient/" + thePatientId)); From 696982ce1e2c12fc1912ddd9f0fff1b506405a47 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 31 Mar 2021 13:45:40 -0400 Subject: [PATCH 14/61] Fix bean name. Update docs --- .../ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md | 2 +- .../src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md index c4766cc71d1..6698087c463 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -6,7 +6,7 @@ Patient/1 --> Patient/3 Patient/2 --> Patient/3 ``` This indicates that both Patient/1 and Patient/2 are MDM-mathed to the same golden resource (Patient/3). -What if you want to get all observations from Patient/1, but also include any observations from all of their linked resources. You could do this by first querying the [$mdm-query-links]() endpoint, and then making a subsequent call like the following +What if you want to get all observations from Patient/1, but also include any observations from all of their linked resources. 
You could do this by first querying the [$mdm-query-links](/docs/server_jpa_mdm/mdm_operations.html) endpoint, and then making a subsequent call like the following ```http request GET http://example.com:8000/Observation?subject=Patient/1,Patient/2,Patient/3 ``` diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index 3432af617ce..437129e227d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -477,7 +477,7 @@ public abstract class BaseConfig { @Bean @Lazy - public MdmSearchExpandingInterceptor mdmSearchExpandingInterceptorInterceptor() { + public MdmSearchExpandingInterceptor mdmSearchExpandingInterceptor() { return new MdmSearchExpandingInterceptor(); } From 2ba3c919d58de0f2b6a76e2496473c84b9b26c5e Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 31 Mar 2021 14:18:57 -0400 Subject: [PATCH 15/61] Fix doGetQueryParameterQualifier on Mdm Expansion --- .../main/java/ca/uhn/fhir/rest/param/ReferenceParam.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java index 32cdaf427d0..765f25533f7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java @@ -92,8 +92,8 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ } } - @Override - String doGetQueryParameterQualifier() { + + private String defaultGetQueryParameterQualifier() { StringBuilder b = new StringBuilder(); if (isNotBlank(myChain)) { if (isNotBlank(getResourceType())) { @@ -108,6 +108,10 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ } 
return null; } + @Override + String doGetQueryParameterQualifier() { + return this.myMdmExpand != null ? ":mdm" : defaultGetQueryParameterQualifier(); + } @Override String doGetValueAsQueryToken(FhirContext theContext) { From f3c706d6564e220aeee4cf8b5bc04788cd83b3b0 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 31 Mar 2021 15:33:03 -0400 Subject: [PATCH 16/61] Add page for mdm expansion --- .../src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java | 2 -- .../src/main/resources/ca/uhn/hapi/fhir/docs/files.properties | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java index 765f25533f7..f30036a390b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java @@ -130,8 +130,6 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ if (Constants.PARAMQUALIFIER_MDM.equals(theQualifier)) { myMdmExpand = true; theQualifier = ""; - //TODO GGG i probably have to deal with chaining here? like refusing the mdm qualifier if i can detect its chained? 
- //TODO GGG just throw an error if they try to chain } String q = theQualifier; diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties index 6e4d038a21b..580cda79968 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties @@ -67,6 +67,7 @@ page.server_jpa_mdm.mdm_rules=MDM Rules page.server_jpa_mdm.mdm_eid=MDM Enterprise Identifiers page.server_jpa_mdm.mdm_operations=MDM Operations page.server_jpa_mdm.mdm_details=MDM Technical Details +page.server_jpa_mdm.mdm_expansion=MDM Search Expansion section.server_jpa_partitioning.title=JPA Server: Partitioning and Multitenancy page.server_jpa_partitioning.partitioning=Partitioning and Multitenancy From e5de34c51d8f32e2c7334f3263354a10fc4248c6 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 31 Mar 2021 16:08:09 -0400 Subject: [PATCH 17/61] Update docs --- .../ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md index 6698087c463..e4aa50a1989 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -1,7 +1,7 @@ # MDM Expansion -Once you have MDM enabled, and you have many linked resources, it can be useful to search across all resourcess. Let's say you have the following MDM links in your database: -``` +Once you have MDM enabled, and you have many linked resources, it can be useful to search across all linked resources. 
Let's say you have the following MDM links in your database: +```bash Patient/1 --> Patient/3 Patient/2 --> Patient/3 ``` @@ -18,9 +18,11 @@ GET http://example.com:8000/Observation?subject:mdm=Patient/1 This `:mdm` parameter qualifier instructs an interceptor in HAPI fhir to expand the set of resources included in the search by their MDM-matched resources. The two above HTTP requests will return the same result. +One important caveat is that chaining is currently not supported when using this prefix + ## Enabling MDM Expansion -On top of needing to instantiate an MDM module, you must enable in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property. +On top of needing to instantiate an MDM module, you must enable this feature in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property.
    It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. From d8866ec75f474d8382bc7deb0cc8cd6bd3e64ee4 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 31 Mar 2021 16:33:49 -0400 Subject: [PATCH 18/61] Add doc caveats --- .../ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md index e4aa50a1989..7acda791000 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -18,7 +18,7 @@ GET http://example.com:8000/Observation?subject:mdm=Patient/1 This `:mdm` parameter qualifier instructs an interceptor in HAPI fhir to expand the set of resources included in the search by their MDM-matched resources. The two above HTTP requests will return the same result. -One important caveat is that chaining is currently not supported when using this prefix +One important caveat is that chaining is currently not supported when using this prefix. 
## Enabling MDM Expansion From 67fe940889fc771ec8d36d2513dfcb15f28335d9 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 1 Apr 2021 11:07:46 -0400 Subject: [PATCH 19/61] Update docs --- .../ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md index 7acda791000..51180269ded 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -18,13 +18,16 @@ GET http://example.com:8000/Observation?subject:mdm=Patient/1 This `:mdm` parameter qualifier instructs an interceptor in HAPI fhir to expand the set of resources included in the search by their MDM-matched resources. The two above HTTP requests will return the same result. + +
    One important caveat is that chaining is currently not supported when using this prefix. +
    ## Enabling MDM Expansion On top of needing to instantiate an MDM module, you must enable this feature in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property. -
    -It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. +
    +It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. Use with caution.
    From 383c55e33995246b21894af42687f4c8ae5a9b0f Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 1 Apr 2021 11:33:45 -0400 Subject: [PATCH 20/61] Add changelog --- .../5_4_0/2520-support-mdm-expansion-in-search.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2520-support-mdm-expansion-in-search.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2520-support-mdm-expansion-in-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2520-support-mdm-expansion-in-search.yaml new file mode 100644 index 00000000000..8f619d91752 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2520-support-mdm-expansion-in-search.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 2520 +title: "Add support for `:mdm` search parameter qualifier on reference search parameters. Details about enabling this feature +can be found [in the documentation](/hapi-fhir/docs/server_jpa_mdm/mdm_expansion.html)." 
From 961c996ea98c0604d5f3e754f7fdabfe2f7f5322 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 1 Apr 2021 17:32:14 -0400 Subject: [PATCH 21/61] Remove foolish local bind --- .../src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java index 222ecfe943c..d62a87b6818 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java @@ -112,8 +112,7 @@ public class TestR4Config extends BaseJavaConfigR4 { }; retVal.setDriver(new org.h2.Driver()); - retVal.setUrl("jdbc:h2:file:/home/tadgh/smile/hapi-fhir/testdb_r4.db"); -// retVal.setUrl("jdbc:h2:mem:testdb_r4"); + retVal.setUrl("jdbc:h2:mem:testdb_r4"); retVal.setMaxWaitMillis(30000); retVal.setUsername(""); retVal.setPassword(""); From b617c7690dd4dcaa939edbeed19f85447baba94b Mon Sep 17 00:00:00 2001 From: James Agnew Date: Mon, 5 Apr 2021 17:40:10 -0400 Subject: [PATCH 22/61] Search Parameter Infrastructure Cleanup (#2522) * Search param service cleanup * Work on collapsing search param * Search param cleanup * Work on build * Test fix * Test fixes * Ongoing work * Test fix * Compile fix * Test fixes * Test fix * Test fix * License header updates * Remove fixme * Cleanup * Cleanup --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../ca/uhn/fhir/context/ModelScanner.java | 33 +- .../uhn/fhir/context/RuntimeSearchParam.java | 89 +++-- .../ca/uhn/fhir/rest/param/ParameterUtil.java | 153 +-------- hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 2 +- .../ca/uhn/fhir/jpa/demo/JpaServerDemo.java | 2 +- 
hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 8 +- hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jaxrsserver-example/pom.xml | 2 +- hapi-fhir-jpaserver-api/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../ca/uhn/fhir/jpa/dao/BaseStorageDao.java | 10 +- .../uhn/fhir/jpa/dao/LegacySearchBuilder.java | 11 +- ...rchParamWithInlineReferencesExtractor.java | 17 +- .../predicate/PredicateBuilderReference.java | 30 +- .../r4/FhirResourceDaoSearchParameterR4.java | 5 +- .../jpa/packages/PackageInstallerSvcImpl.java | 7 +- .../JpaCapabilityStatementProvider.java | 4 +- .../dstu3/JpaConformanceProviderDstu3.java | 8 +- .../fhir/jpa/search/builder/QueryStack.java | 303 +++++++++--------- .../jpa/search/builder/SearchBuilder.java | 23 +- .../ResourceLinkPredicateBuilder.java | 13 +- .../reindex/ResourceReindexingSvcImpl.java | 2 +- .../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 5 +- .../fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java | 11 +- ...ceDaoDstu2SearchCustomSearchParamTest.java | 2 +- .../fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java | 3 +- ...esourceDaoDstu3PhoneticSearchNoFtTest.java | 2 +- ...ourceDaoR4SearchCustomSearchParamTest.java | 2 +- ...esourceDaoR4SearchWithElasticSearchIT.java | 2 +- ...urceDaoR4SearchWithLuceneDisabledTest.java | 4 +- ...sourceDaoR4TerminologyElasticsearchIT.java | 2 +- ...hirResourceDaoR4UniqueSearchParamTest.java | 19 +- .../dao/r4/SearchParamExtractorR4Test.java | 31 +- .../provider/ResourceProviderDstu2Test.java | 2 +- ...sourceProviderCustomSearchParamR4Test.java | 2 + .../r4/ResourceProviderHasParamR4Test.java | 2 +- .../ResourceReindexingSvcImplTest.java | 3 +- .../jpa/searchparam/MatchUrlServiceTest.java | 6 +- .../ValueSetExpansionR4ElasticsearchIT.java | 2 +- hapi-fhir-jpaserver-batch/pom.xml | 2 +- hapi-fhir-jpaserver-cql/pom.xml | 6 +- 
hapi-fhir-jpaserver-mdm/pom.xml | 6 +- .../jpa/mdm/config/MdmConsumerConfig.java | 4 +- .../jpa/mdm/config/MdmSubmitterConfig.java | 4 +- .../fhir/jpa/mdm/svc/MdmSearchParamSvc.java | 2 +- hapi-fhir-jpaserver-migrate/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- .../fhir/jpa/cache/ResourceChangeEvent.java | 1 + .../searchparam/JpaRuntimeSearchParam.java | 88 ----- .../fhir/jpa/searchparam/MatchUrlService.java | 13 +- .../searchparam/config/SearchParamConfig.java | 2 +- .../extractor/BaseSearchParamExtractor.java | 2 +- .../extractor/SearchParamExtractorDstu2.java | 2 +- .../extractor/SearchParamExtractorDstu3.java | 2 +- .../extractor/SearchParamExtractorR4.java | 2 +- .../extractor/SearchParamExtractorR5.java | 2 +- .../SearchParamExtractorService.java | 5 +- .../matcher/InMemoryResourceMatcher.java | 2 +- .../registry/ISearchParamRegistry.java | 77 ----- .../ISearchParamRegistryController.java | 27 +- .../registry/JpaSearchParamCache.java | 61 ++-- .../registry/ReadOnlySearchParamCache.java | 76 ++--- .../registry/RuntimeSearchParamCache.java | 39 ++- .../registry/SearchParamRegistryImpl.java | 41 ++- .../SearchParameterCanonicalizer.java | 42 +-- .../jpa/searchparam/util/JpaParamUtil.java | 194 +++++++++++ .../fhir/jpa/searchparam/IndexStressTest.java | 2 +- .../SearchParamExtractorDstu3Test.java | 49 ++- .../SearchParamExtractorMegaTest.java | 30 +- .../InMemoryResourceMatcherR5Test.java | 13 +- .../registry/SearchParamRegistryImplTest.java | 3 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- .../match/registry/SubscriptionLoader.java | 2 +- .../WebsocketConnectionValidatorTest.java | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 4 +- .../ca/uhn/fhirtest/TestRestfulServer.java | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- .../mdm/rules/config/MdmRuleValidator.java | 8 +- .../test/java/ca/uhn/fhir/mdm/BaseR4Test.java | 4 +- 
.../mdm/rules/svc/ResourceMatcherR4Test.java | 3 - .../ca/uhn/fhir/mdm/svc/EIDHelperR4Test.java | 5 +- hapi-fhir-server/pom.xml | 2 +- .../server/RestfulServerConfiguration.java | 14 +- .../SearchPreferHandlingInterceptor.java | 12 +- .../ServerCapabilityStatementProvider.java | 26 +- .../server/util/ISearchParamRegistry.java | 106 ++++++ .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- .../fhir/context/FhirContextDstu3Test.java | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- .../org/hl7/fhir/r4/model/ModelR4Test.java | 10 + hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 26 +- .../fhir/tinder/TinderJpaRestServerMojo.java | 10 +- .../fhir/tinder/model/SearchParameter.java | 4 +- .../parser/ResourceGeneratorUsingModel.java | 17 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 6 +- restful-server-example/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 133 files changed, 1008 insertions(+), 974 deletions(-) delete mode 100644 hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/JpaRuntimeSearchParam.java delete mode 100644 hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java rename hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRetriever.java => 
hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java (52%) create mode 100644 hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index a73013b3dea..24b70220c38 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index d6ab5abb4ed..7799cb47af2 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 0854f8c8eb4..c5a761ca696 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java index fd60d389a42..69d7ed6272d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java @@ -394,41 +394,30 @@ class ModelScanner { b.append(" provides compartment membership but is not of type 'reference'"); ourLog.warn(b.toString()); continue; -// throw new ConfigurationException(b.toString()); } providesMembershipInCompartments.add(next.name()); } + List components = null; if (paramType == RestSearchParameterTypeEnum.COMPOSITE) { - compositeFields.put(nextField, searchParam); - continue; + components = new ArrayList<>(); + for (String next : 
searchParam.compositeOf()) { + String ref = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + next; + components.add(new RuntimeSearchParam.Component(null, ref)); + } } - Collection base = Collections.singletonList(theResourceDef.getName()); - RuntimeSearchParam param = new RuntimeSearchParam(null, null, searchParam.name(), searchParam.description(), searchParam.path(), paramType, null, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, base); + String url = null; + if (theResourceDef.isStandardType()) { + url = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + searchParam.name(); + } + RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, false, components, base); theResourceDef.addSearchParam(param); nameToParam.put(param.getName(), param); } } - for (Entry nextEntry : compositeFields.entrySet()) { - SearchParamDefinition searchParam = nextEntry.getValue(); - - List compositeOf = new ArrayList<>(); - for (String nextName : searchParam.compositeOf()) { - RuntimeSearchParam param = nameToParam.get(nextName); - if (param == null) { - ourLog.warn("Search parameter {}.{} declares that it is a composite with compositeOf value '{}' but that is not a valid parameter name itself. 
Valid values are: {}", - theResourceDef.getName(), searchParam.name(), nextName, nameToParam.keySet()); - continue; - } - compositeOf.add(param); - } - - RuntimeSearchParam param = new RuntimeSearchParam(null, null, searchParam.name(), searchParam.description(), searchParam.path(), RestSearchParameterTypeEnum.COMPOSITE, compositeOf, null, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE); - theResourceDef.addSearchParam(param); - } } private Set toTargetList(Class[] theTarget) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java index 9748925d0fc..8e795c32e1a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java @@ -46,7 +46,6 @@ import static org.apache.commons.lang3.StringUtils.trim; public class RuntimeSearchParam { private final IIdType myId; private final Set myBase; - private final List myCompositeOf; private final String myDescription; private final String myName; private final RestSearchParameterTypeEnum myParamType; @@ -56,21 +55,29 @@ public class RuntimeSearchParam { private final RuntimeSearchParamStatusEnum myStatus; private final String myUri; private final Map>> myExtensions = new HashMap<>(); + private final boolean myUnique; + private final List myComponents; private IPhoneticEncoder myPhoneticEncoder; /** * Constructor */ - public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List theCompositeOf, - Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus) { - this(theId, theUri, theName, theDescription, thePath, theParamType, theCompositeOf, theProvidesMembershipInCompartments, theTargets, theStatus, null); + public RuntimeSearchParam(IIdType theId, String theUri, String 
theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, + Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, Collection theBase) { + this(theId, theUri, theName, theDescription, thePath, theParamType, theProvidesMembershipInCompartments, theTargets, theStatus, false, Collections.emptyList(), theBase); + } + + /** + * Copy constructor + */ + public RuntimeSearchParam(RuntimeSearchParam theSp) { + this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.isUnique(), theSp.getComponents(), theSp.getBase()); } /** * Constructor */ - public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List theCompositeOf, - Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, Collection theBase) { + public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, boolean theUnique, List theComponents, Collection theBase) { super(); myId = theId; @@ -79,7 +86,6 @@ public class RuntimeSearchParam { myDescription = theDescription; myPath = thePath; myParamType = theParamType; - myCompositeOf = theCompositeOf; myStatus = theStatus; if (theProvidesMembershipInCompartments != null && !theProvidesMembershipInCompartments.isEmpty()) { myProvidesMembershipInCompartments = Collections.unmodifiableSet(theProvidesMembershipInCompartments); @@ -104,20 +110,20 @@ public class RuntimeSearchParam { } else { myBase = Collections.unmodifiableSet(new HashSet<>(theBase)); } + myUnique = theUnique; + if (theComponents != null) { + myComponents = 
Collections.unmodifiableList(theComponents); + } else { + myComponents = Collections.emptyList(); + } } - /** - * Constructor - */ - public RuntimeSearchParam(String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus) { - this(null, null, theName, theDescription, thePath, theParamType, null, theProvidesMembershipInCompartments, theTargets, theStatus); + public List getComponents() { + return myComponents; } - /** - * Copy constructor - */ - public RuntimeSearchParam(RuntimeSearchParam theSp) { - this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getCompositeOf(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.getBase()); + public boolean isUnique() { + return myUnique; } /** @@ -205,10 +211,6 @@ public class RuntimeSearchParam { return myStatus; } - public List getCompositeOf() { - return myCompositeOf; - } - public String getDescription() { return myDescription; } @@ -247,13 +249,6 @@ public class RuntimeSearchParam { return myProvidesMembershipInCompartments; } - public enum RuntimeSearchParamStatusEnum { - ACTIVE, - DRAFT, - RETIRED, - UNKNOWN - } - public RuntimeSearchParam setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { myPhoneticEncoder = thePhoneticEncoder; return this; @@ -265,4 +260,42 @@ public class RuntimeSearchParam { } return myPhoneticEncoder.encode(theString); } + + public enum RuntimeSearchParamStatusEnum { + ACTIVE, + DRAFT, + RETIRED, + UNKNOWN + } + + public static class Component { + private final String myExpression; + private final String myReference; + + /** + * Constructor + */ + public Component(String theExpression, String theReference) { + myExpression = theExpression; + myReference = theReference; + + } + + @Override + public String toString() { + return new ToStringBuilder(this, 
ToStringStyle.SHORT_PREFIX_STYLE) + .append("expression", myExpression) + .append("reference", myReference) + .toString(); + } + + public String getExpression() { + return myExpression; + } + + public String getReference() { + return myReference; + } + } + } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java index f980a91d060..132d7a76253 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java @@ -1,5 +1,19 @@ package ca.uhn.fhir.rest.param; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.model.api.IQueryParameterOr; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.model.primitive.IntegerDt; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.QualifiedParamList; +import ca.uhn.fhir.util.ReflectionUtil; +import ca.uhn.fhir.util.UrlUtil; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; + import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.util.ArrayList; @@ -8,25 +22,6 @@ import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.model.api.IQueryParameterAnd; -import ca.uhn.fhir.model.api.IQueryParameterOr; -import ca.uhn.fhir.model.api.IQueryParameterType; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -import ca.uhn.fhir.rest.annotation.IdParam; 
-import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.QualifiedParamList; -import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; -import ca.uhn.fhir.rest.param.binder.QueryParameterAndBinder; -import ca.uhn.fhir.util.ReflectionUtil; -import ca.uhn.fhir.util.UrlUtil; - /* * #%L * HAPI FHIR - Core Library @@ -59,126 +54,6 @@ public class ParameterUtil { return (T) value; } - /** - * This is a utility method intended provided to help the JPA module. - */ - public static IQueryParameterAnd parseQueryParams(FhirContext theContext, RestSearchParameterTypeEnum paramType, - String theUnqualifiedParamName, List theParameters) { - QueryParameterAndBinder binder; - switch (paramType) { - case COMPOSITE: - throw new UnsupportedOperationException(); - case DATE: - binder = new QueryParameterAndBinder(DateAndListParam.class, - Collections.emptyList()); - break; - case NUMBER: - binder = new QueryParameterAndBinder(NumberAndListParam.class, - Collections.emptyList()); - break; - case QUANTITY: - binder = new QueryParameterAndBinder(QuantityAndListParam.class, - Collections.emptyList()); - break; - case REFERENCE: - binder = new QueryParameterAndBinder(ReferenceAndListParam.class, - Collections.emptyList()); - break; - case STRING: - binder = new QueryParameterAndBinder(StringAndListParam.class, - Collections.emptyList()); - break; - case TOKEN: - binder = new QueryParameterAndBinder(TokenAndListParam.class, - Collections.emptyList()); - break; - case URI: - binder = new QueryParameterAndBinder(UriAndListParam.class, - Collections.emptyList()); - break; - case HAS: - binder = new QueryParameterAndBinder(HasAndListParam.class, - Collections.emptyList()); - break; - case SPECIAL: - binder = new QueryParameterAndBinder(SpecialAndListParam.class, - Collections.emptyList()); - break; - default: - throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType + " which is currently not supported."); - } - - return 
binder.parse(theContext, theUnqualifiedParamName, theParameters); - } - - /** - * This is a utility method intended provided to help the JPA module. - */ - public static IQueryParameterAnd parseQueryParams(FhirContext theContext, RuntimeSearchParam theParamDef, - String theUnqualifiedParamName, List theParameters) { - - RestSearchParameterTypeEnum paramType = theParamDef.getParamType(); - - if (paramType == RestSearchParameterTypeEnum.COMPOSITE) { - - List theCompositList = theParamDef.getCompositeOf(); - - if (theCompositList == null) { - throw new ConfigurationException("Search parameter of type " + theUnqualifiedParamName - + " can be found in parameter annotation, found "); - } - - if (theCompositList.size() != 2) { - throw new ConfigurationException("Search parameter of type " + theUnqualifiedParamName - + " must have 2 composite types declared in parameter annotation, found " - + theCompositList.size()); - } - - RuntimeSearchParam left = theCompositList.get(0); - RuntimeSearchParam right = theCompositList.get(1); - - @SuppressWarnings({ "unchecked", "rawtypes" }) - CompositeAndListParam cp = new CompositeAndListParam( - getCompositBindingClass(left.getParamType(), left.getName()), - getCompositBindingClass(right.getParamType(), right.getName())); - - cp.setValuesAsQueryTokens(theContext, theUnqualifiedParamName, theParameters); - - return cp; - } else { - return parseQueryParams(theContext, paramType, theUnqualifiedParamName, theParameters); - } - } - - private static Class getCompositBindingClass(RestSearchParameterTypeEnum paramType, - String theUnqualifiedParamName) { - - switch (paramType) { - case DATE: - return DateParam.class; - case NUMBER: - return NumberParam.class; - case QUANTITY: - return QuantityParam.class; - case REFERENCE: - return ReferenceParam.class; - case STRING: - return StringParam.class; - case TOKEN: - return TokenParam.class; - case URI: - return UriParam.class; - case HAS: - return HasParam.class; - case SPECIAL: - return 
SpecialParam.class; - - default: - throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType - + " which is currently not supported."); - } - } - /** * Removes :modifiers and .chains from URL parameter names */ diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 19e515d26a4..4dd2fffc8f8 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -3,14 +3,14 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT pom HAPI FHIR BOM ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 8111acba863..0f7b6eb19c0 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index e1635c1b9db..dd903ab4f1b 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index e319c6da42a..39c3cc8e20d 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java index dc7d8cff645..b9ac064b29a 100644 --- 
a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java @@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3; import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3; import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider; import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator; diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index 39a66edf090..9783866a885 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 3a02ec6e5c0..6893093762d 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index a2fa46beaab..6f0efaa5903 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 227dea350b2..20d250d8cc3 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 
e76461561f4..6b0955b04ab 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index cb1302c1487..fb13d1edbb7 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -78,13 +78,13 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu2 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT compile ca.uhn.hapi.fhir hapi-fhir-jpaserver-subscription - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT compile @@ -101,7 +101,7 @@ ca.uhn.hapi.fhir hapi-fhir-testpage-overlay - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT classes diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index 47bb72c223c..6090e7545d9 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index de0d3afbb8a..6cfa369bfdc 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml index ace48cd8706..687d6b10dbf 100644 --- a/hapi-fhir-jaxrsserver-example/pom.xml +++ b/hapi-fhir-jaxrsserver-example/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index b14ee8d6d6d..8cf5d00b229 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 
5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 982468d6dbc..a8958f299a0 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java index b4b0530633d..0dc80bf9500 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.dao; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; @@ -34,7 +33,8 @@ import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.rest.api.QualifiedParamList; @@ -44,7 +44,6 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails; import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; -import ca.uhn.fhir.rest.param.ParameterUtil; import 
ca.uhn.fhir.rest.param.QualifierDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; @@ -339,13 +338,12 @@ public abstract class BaseStorageDao { } // Should not be null since the check above would have caught it - RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(getResourceName()); - RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(resourceDef, qualifiedParamName.getParamName()); + RuntimeSearchParam paramDef = mySearchParamRegistry.getActiveSearchParam(getResourceName(), qualifiedParamName.getParamName()); for (String nextValue : theSource.get(nextParamName)) { QualifiedParamList qualifiedParam = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifiedParamName.getWholeQualifier(), nextValue); List paramList = Collections.singletonList(qualifiedParam); - IQueryParameterAnd parsedParam = ParameterUtil.parseQueryParams(getContext(), paramDef, nextParamName, paramList); + IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams(mySearchParamRegistry, getContext(), paramDef, nextParamName, paramList); theTarget.add(qualifiedParamName.getParamName(), parsedParam); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/LegacySearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/LegacySearchBuilder.java index 4f3a2de40f5..25a5dbbc89d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/LegacySearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/LegacySearchBuilder.java @@ -42,7 +42,6 @@ import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import 
ca.uhn.fhir.jpa.model.entity.ResourceIndexedCompositeStringUnique; import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; @@ -51,9 +50,8 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; import ca.uhn.fhir.jpa.util.BaseIterator; @@ -483,8 +481,7 @@ public class LegacySearchBuilder implements ISearchBuilder { return orders; } - RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(myResourceName); - RuntimeSearchParam param = mySearchParamRegistry.getSearchParamByName(resourceDef, theSort.getParamName()); + RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName()); if (param == null) { throw new InvalidRequestException("Unknown sort parameter '" + theSort.getParamName() + "'"); } @@ -757,7 +754,7 @@ public class LegacySearchBuilder implements ISearchBuilder { String paramName = nextInclude.getParamName(); if (isNotBlank(paramName)) { - param = mySearchParamRegistry.getSearchParamByName(def, paramName); + param = mySearchParamRegistry.getActiveSearchParam(resType, paramName); } else { param = null; } @@ -876,7 +873,7 @@ public class LegacySearchBuilder implements ISearchBuilder { // Since we're going to remove elements below theParams.values().forEach(nextAndList -> ensureSubListsAreWritable(nextAndList)); - List activeUniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(myResourceName, 
theParams.keySet()); + List activeUniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(myResourceName, theParams.keySet()); if (activeUniqueSearchParams.size() > 0) { StringBuilder sb = new StringBuilder(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java index f16ae6e2fac..228b0d62303 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java @@ -25,26 +25,26 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.DaoConfig; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; -import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.MatchResourceUrlService; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedCompositeStringUniqueDao; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; +import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedCompositeStringUnique; import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams; import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorService; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import 
ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.UrlUtil; import com.google.common.annotations.VisibleForTesting; @@ -148,13 +148,14 @@ public class SearchParamWithInlineReferencesExtractor { private void extractCompositeStringUniques(ResourceTable theEntity, ResourceIndexedSearchParams theParams) { final String resourceType = theEntity.getResourceType(); - List uniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(resourceType); + List uniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(resourceType); - for (JpaRuntimeSearchParam next : uniqueSearchParams) { + for (RuntimeSearchParam next : uniqueSearchParams) { List> partsChoices = new ArrayList<>(); - for (RuntimeSearchParam nextCompositeOf : next.getCompositeOf()) { + List compositeComponents = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, next); + for (RuntimeSearchParam nextCompositeOf : compositeComponents) { Collection paramsListForCompositePart = null; Collection linksForCompositePart = null; Collection linksForCompositePartWantPaths = null; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderReference.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderReference.java index a973b5f52c2..bb84e96c160 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderReference.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderReference.java @@ -47,7 +47,8 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.SourceParam; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.model.api.IQueryParameterAnd; @@ -64,7 +65,6 @@ import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.HasParam; import ca.uhn.fhir.rest.param.NumberParam; -import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.SpecialParam; @@ -315,9 +315,9 @@ class PredicateBuilderReference extends BasePredicateBuilder { boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain); RuntimeSearchParam param = null; if (!isMeta) { - param = mySearchParamRegistry.getSearchParamByName(typeDef, chain); + param = mySearchParamRegistry.getActiveSearchParam(subResourceName, chain); if (param == null) { - ourLog.debug("Type {} doesn't have search param {}", nextType.getSimpleName(), param); + ourLog.debug("Type {} doesn't have search param {}", subResourceName, param); continue; } } @@ -397,8 +397,7 @@ class PredicateBuilderReference extends BasePredicateBuilder { } if (resourceTypes.isEmpty()) { - RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceName); - RuntimeSearchParam searchParamByName = mySearchParamRegistry.getSearchParamByName(resourceDef, theParamName); + RuntimeSearchParam searchParamByName = 
mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); if (searchParamByName == null) { throw new InternalErrorException("Could not find parameter " + theParamName); } @@ -480,8 +479,7 @@ class PredicateBuilderReference extends BasePredicateBuilder { } Predicate createResourceLinkPathPredicate(String theResourceName, String theParamName, From from) { - RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceName); - RuntimeSearchParam param = mySearchParamRegistry.getSearchParamByName(resourceDef, theParamName); + RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); List path = param.getPathsSplit(); /* @@ -798,7 +796,7 @@ class PredicateBuilderReference extends BasePredicateBuilder { qp = new TokenParam(); break; case COMPOSITE: - List compositeOf = theParam.getCompositeOf(); + List compositeOf = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); if (compositeOf.size() != 2) { throw new InternalErrorException("Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. Don't know how handlt this."); } @@ -957,20 +955,19 @@ class PredicateBuilderReference extends BasePredicateBuilder { //Ensure that the name of the search param // (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val) // exists on the target resource type. - RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramName); + RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramName); if (owningParameterDef == null) { throw new InvalidRequestException("Unknown parameter name: " + targetResourceType + ':' + parameterName); } //Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in Patient?_has:Observation:subject:code=sys|val) //exists on the target resource. 
- owningParameterDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramReference); - if (owningParameterDef == null) { + RuntimeSearchParam joiningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramReference); + if (joiningParameterDef == null) { throw new InvalidRequestException("Unknown parameter name: " + targetResourceType + ':' + paramReference); } - RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramName); - IQueryParameterAnd> parsedParam = (IQueryParameterAnd>) ParameterUtil.parseQueryParams(myContext, paramDef, paramName, parameters); + IQueryParameterAnd> parsedParam = (IQueryParameterAnd>) JpaParamUtil.parseQueryParams(mySearchParamRegistry, myContext, owningParameterDef, paramName, parameters); for (IQueryParameterOr next : parsedParam.getValuesAsQueryTokens()) { orValues.addAll(next.getValuesAsQueryTokens()); @@ -1011,11 +1008,12 @@ class PredicateBuilderReference extends BasePredicateBuilder { } CompositeParam cp = (CompositeParam) or; - RuntimeSearchParam left = theParamDef.getCompositeOf().get(0); + List componentParams = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParamDef); + RuntimeSearchParam left = componentParams.get(0); IQueryParameterType leftValue = cp.getLeftValue(); myQueryStack.addPredicate(createCompositeParamPart(theResourceName, myQueryStack.getRootForComposite(), left, leftValue, theRequestPartitionId)); - RuntimeSearchParam right = theParamDef.getCompositeOf().get(1); + RuntimeSearchParam right = componentParams.get(1); IQueryParameterType rightValue = cp.getRightValue(); myQueryStack.addPredicate(createCompositeParamPart(theResourceName, myQueryStack.getRootForComposite(), right, rightValue, theRequestPartitionId)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSearchParameterR4.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSearchParameterR4.java index 90c3dbeee0e..e6ef352724c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSearchParameterR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSearchParameterR4.java @@ -8,21 +8,18 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSearchParameter; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ElementUtil; import ca.uhn.fhir.util.HapiExtensions; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r4.model.CodeType; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.SearchParameter; import org.springframework.beans.factory.annotation.Autowired; -import java.util.List; import java.util.regex.Pattern; import static org.apache.commons.lang3.StringUtils.isBlank; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java index 05962d2fcb1..b8cfc721732 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java @@ -36,7 +36,8 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity; import 
ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; @@ -106,6 +107,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { @Autowired private ISearchParamRegistry mySearchParamRegistry; @Autowired + private ISearchParamRegistryController mySearchParamRegistryController; + @Autowired private PartitionSettings myPartitionSettings; /** * Constructor @@ -175,7 +178,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { install(npmPackage, theInstallationSpec, retVal); // If any SearchParameters were installed, let's load them right away - mySearchParamRegistry.refreshCacheIfNecessary(); + mySearchParamRegistryController.refreshCacheIfNecessary(); } } catch (IOException e) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java index 298e8c38239..07c0fd5769c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java @@ -27,7 +27,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.provider.ServerCapabilityStatementProvider; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import 
ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.ExtensionConstants; import ca.uhn.fhir.util.ExtensionUtil; @@ -59,7 +59,7 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro /** * Constructor */ - public JpaCapabilityStatementProvider(@Nonnull RestfulServer theRestfulServer, @Nonnull IFhirSystemDao theSystemDao, @Nonnull DaoConfig theDaoConfig, @Nonnull ISearchParamRetriever theSearchParamRegistry, IValidationSupport theValidationSupport) { + public JpaCapabilityStatementProvider(@Nonnull RestfulServer theRestfulServer, @Nonnull IFhirSystemDao theSystemDao, @Nonnull DaoConfig theDaoConfig, @Nonnull ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { super(theRestfulServer, theSearchParamRegistry, theValidationSupport); Validate.notNull(theRestfulServer); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java index 4b3ed029b94..ba1260f24bc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java @@ -26,7 +26,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.RestfulServer; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.ExtensionConstants; import org.hl7.fhir.dstu3.model.Bundle; @@ -53,7 +53,7 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se private volatile CapabilityStatement myCachedValue; private DaoConfig myDaoConfig; - private ISearchParamRetriever mySearchParamRegistry; 
+ private ISearchParamRegistry mySearchParamRegistry; private String myImplementationDescription; private boolean myIncludeResourceCounts; private RestfulServer myRestfulServer; @@ -72,7 +72,7 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se /** * Constructor */ - public JpaConformanceProviderDstu3(RestfulServer theRestfulServer, IFhirSystemDao theSystemDao, DaoConfig theDaoConfig, ISearchParamRetriever theSearchParamRegistry) { + public JpaConformanceProviderDstu3(RestfulServer theRestfulServer, IFhirSystemDao theSystemDao, DaoConfig theDaoConfig, ISearchParamRegistry theSearchParamRegistry) { super(theRestfulServer); myRestfulServer = theRestfulServer; mySystemDao = theSystemDao; @@ -82,7 +82,7 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se setIncludeResourceCounts(true); } - public void setSearchParamRegistry(ISearchParamRetriever theSearchParamRegistry) { + public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) { mySearchParamRegistry = theSearchParamRegistry; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index fb3489bb890..df72ae2f688 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -52,9 +52,8 @@ import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.UriPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import 
ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.jpa.searchparam.util.SourceParam; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterOr; @@ -63,13 +62,13 @@ import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.QualifiedParamList; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; +import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.HasParam; import ca.uhn.fhir.rest.param.NumberParam; import ca.uhn.fhir.rest.param.ParamPrefixEnum; -import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; @@ -80,6 +79,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import com.google.common.collect.Lists; import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.ComboCondition; @@ -298,11 +298,12 @@ public class QueryStack { } CompositeParam cp = (CompositeParam) next; - RuntimeSearchParam left = theParamDef.getCompositeOf().get(0); + List componentParams = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParamDef); + RuntimeSearchParam left = componentParams.get(0); IQueryParameterType leftValue = cp.getLeftValue(); Condition leftPredicate = createPredicateCompositePart(theSourceJoinColumn, theResourceName, theSpnamePrefix, left, leftValue, theRequestPartitionId); - RuntimeSearchParam right = theParamDef.getCompositeOf().get(1); + 
RuntimeSearchParam right = componentParams.get(1); IQueryParameterType rightValue = cp.getRightValue(); Condition rightPredicate = createPredicateCompositePart(theSourceJoinColumn, theResourceName, theSpnamePrefix, right, rightValue, theRequestPartitionId); @@ -333,9 +334,16 @@ public class QueryStack { case QUANTITY: { return createPredicateQuantity(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParam, Collections.singletonList(theParamValue), null, theRequestPartitionId); } + case NUMBER: + case REFERENCE: + case COMPOSITE: + case URI: + case HAS: + case SPECIAL: + default: + throw new InvalidRequestException("Don't know how to handle composite parameter with type of " + theParam.getParamType()); } - throw new InvalidRequestException("Don't know how to handle composite parameter with type of " + theParam.getParamType()); } public Condition createPredicateCoords(@Nullable DbColumn theSourceJoinColumn, @@ -360,11 +368,11 @@ public class QueryStack { } public Condition createPredicateDate(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); - + PredicateBuilderCacheLookupResult predicateBuilderLookupResult = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.DATE, theSourceJoinColumn, paramName, () -> mySqlBuilder.addDatePredicateBuilder(theSourceJoinColumn)); DatePredicateBuilder predicateBuilder = predicateBuilderLookupResult.getResult(); boolean cacheHit = predicateBuilderLookupResult.isCacheHit(); @@ -420,51 +428,56 @@ public class QueryStack { String paramName = theFilter.getParamPath().getName(); - if 
(paramName.equals(IAnyResource.SP_RES_ID)) { - TokenParam param = new TokenParam(); - param.setValueAsQueryToken(null, null, null, theFilter.getValue()); - return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId); - } else if (paramName.equals(IAnyResource.SP_RES_LANGUAGE)) { - return theQueryStack3.createPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))), theFilter.getOperation()); - } else if (paramName.equals(Constants.PARAM_SOURCE)) { - TokenParam param = new TokenParam(); - param.setValueAsQueryToken(null, null, null, theFilter.getValue()); - return createPredicateSource(null, Collections.singletonList(param)); - } else { - RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName); - if (searchParam == null) { - Collection validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); - String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", paramName, theResourceName, validNames); - throw new InvalidRequestException(msg); - } - RestSearchParameterTypeEnum typeEnum = searchParam.getParamType(); - if (typeEnum == RestSearchParameterTypeEnum.URI) { - return theQueryStack3.createPredicateUri(null, theResourceName, null, searchParam, Collections.singletonList(new UriParam(theFilter.getValue())), theFilter.getOperation(), theRequest, theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.STRING) { - return theQueryStack3.createPredicateString(null, theResourceName, null, searchParam, Collections.singletonList(new StringParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.DATE) { - return theQueryStack3.createPredicateDate(null, theResourceName, 
null, searchParam, Collections.singletonList(new DateParam(fromOperation(theFilter.getOperation()), theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.NUMBER) { - return theQueryStack3.createPredicateNumber(null, theResourceName, null, searchParam, Collections.singletonList(new NumberParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.REFERENCE) { - SearchFilterParser.CompareOperation operation = theFilter.getOperation(); - String resourceType = null; // The value can either have (Patient/123) or not have (123) a resource type, either way it's not needed here - String chain = (theFilter.getParamPath().getNext() != null) ? theFilter.getParamPath().getNext().toString() : null; - String value = theFilter.getValue(); - ReferenceParam referenceParam = new ReferenceParam(resourceType, chain, value); - return theQueryStack3.createPredicateReference(null, theResourceName, paramName, Collections.singletonList(referenceParam), operation, theRequest, theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.QUANTITY) { - return theQueryStack3.createPredicateQuantity(null, theResourceName, null, searchParam, Collections.singletonList(new QuantityParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); - } else if (typeEnum == RestSearchParameterTypeEnum.COMPOSITE) { - throw new InvalidRequestException("Composite search parameters not currently supported with _filter clauses"); - } else if (typeEnum == RestSearchParameterTypeEnum.TOKEN) { + switch (paramName) { + case IAnyResource.SP_RES_ID: { TokenParam param = new TokenParam(); - param.setValueAsQueryToken(null, - null, - null, - theFilter.getValue()); - return theQueryStack3.createPredicateToken(null, theResourceName, null, searchParam, Collections.singletonList(param), theFilter.getOperation(), theRequestPartitionId); + 
param.setValueAsQueryToken(null, null, null, theFilter.getValue()); + return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId); } + case IAnyResource.SP_RES_LANGUAGE: { + return theQueryStack3.createPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))), theFilter.getOperation()); + } + case Constants.PARAM_SOURCE: { + TokenParam param = new TokenParam(); + param.setValueAsQueryToken(null, null, null, theFilter.getValue()); + return createPredicateSource(null, Collections.singletonList(param)); + } + default: + RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName); + if (searchParam == null) { + Collection validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); + String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", paramName, theResourceName, validNames); + throw new InvalidRequestException(msg); + } + RestSearchParameterTypeEnum typeEnum = searchParam.getParamType(); + if (typeEnum == RestSearchParameterTypeEnum.URI) { + return theQueryStack3.createPredicateUri(null, theResourceName, null, searchParam, Collections.singletonList(new UriParam(theFilter.getValue())), theFilter.getOperation(), theRequest, theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.STRING) { + return theQueryStack3.createPredicateString(null, theResourceName, null, searchParam, Collections.singletonList(new StringParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.DATE) { + return theQueryStack3.createPredicateDate(null, theResourceName, null, searchParam, Collections.singletonList(new DateParam(fromOperation(theFilter.getOperation()), theFilter.getValue())), 
theFilter.getOperation(), theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.NUMBER) { + return theQueryStack3.createPredicateNumber(null, theResourceName, null, searchParam, Collections.singletonList(new NumberParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.REFERENCE) { + SearchFilterParser.CompareOperation operation = theFilter.getOperation(); + String resourceType = null; // The value can either have (Patient/123) or not have (123) a resource type, either way it's not needed here + String chain = (theFilter.getParamPath().getNext() != null) ? theFilter.getParamPath().getNext().toString() : null; + String value = theFilter.getValue(); + ReferenceParam referenceParam = new ReferenceParam(resourceType, chain, value); + return theQueryStack3.createPredicateReference(null, theResourceName, paramName, Collections.singletonList(referenceParam), operation, theRequest, theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.QUANTITY) { + return theQueryStack3.createPredicateQuantity(null, theResourceName, null, searchParam, Collections.singletonList(new QuantityParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + } else if (typeEnum == RestSearchParameterTypeEnum.COMPOSITE) { + throw new InvalidRequestException("Composite search parameters not currently supported with _filter clauses"); + } else if (typeEnum == RestSearchParameterTypeEnum.TOKEN) { + TokenParam param = new TokenParam(); + param.setValueAsQueryToken(null, + null, + null, + theFilter.getValue()); + return theQueryStack3.createPredicateToken(null, theResourceName, null, searchParam, Collections.singletonList(param), theFilter.getOperation(), theRequestPartitionId); + } + break; } return null; } @@ -493,9 +506,8 @@ public class QueryStack { continue; } - RuntimeResourceDefinition targetResourceDefinition; try { - targetResourceDefinition = 
myFhirContext.getResourceDefinition(targetResourceType); + myFhirContext.getResourceDefinition(targetResourceType); } catch (DataFormatException e) { throw new InvalidRequestException("Invalid resource type: " + targetResourceType); } @@ -518,20 +530,19 @@ public class QueryStack { //Ensure that the name of the search param // (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val) // exists on the target resource type. - RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramName); + RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramName); if (owningParameterDef == null) { throw new InvalidRequestException("Unknown parameter name: " + targetResourceType + ':' + parameterName); } //Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in Patient?_has:Observation:subject:code=sys|val) //exists on the target resource. - owningParameterDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramReference); - if (owningParameterDef == null) { + RuntimeSearchParam joiningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramReference); + if (joiningParameterDef == null) { throw new InvalidRequestException("Unknown parameter name: " + targetResourceType + ':' + paramReference); } - RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(targetResourceDefinition, paramName); - IQueryParameterAnd parsedParam = ParameterUtil.parseQueryParams(myFhirContext, paramDef, paramName, parameters); + IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams(mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters); for (IQueryParameterOr next : parsedParam.getValuesAsQueryTokens()) { orValues.addAll(next.getValuesAsQueryTokens()); @@ -607,8 +618,8 @@ public class QueryStack { } public Condition createPredicateNumber(@Nullable 
DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); @@ -634,7 +645,7 @@ public class QueryStack { operation = toOperation(param.getPrefix()); } - + Condition predicate = join.createPredicateNumeric(theResourceName, paramName, operation, value, theRequestPartitionId, nextOr); codePredicates.add(predicate); @@ -648,8 +659,8 @@ public class QueryStack { } public Condition createPredicateQuantity(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); @@ -719,9 +730,9 @@ public class QueryStack { } private Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn, - String theResourceName, String theParamName, RuntimeSearchParam theSearchParam, - List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + String theResourceName, String theParamName, RuntimeSearchParam theSearchParam, + List theList, SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { String spnamePrefix = theParamName; @@ -767,7 +778,7 @@ public class QueryStack { if 
(targetParamDefinition == null) { throw new InvalidRequestException("Unknown search parameter name: " + theSearchParam.getName() + '.' + targetParamName + "."); } - + qp = toParameterType(targetParamDefinition); qp.setValueAsQueryToken(myFhirContext, targetParamName, targetQualifier, targetValue); orValues.add(qp); @@ -777,47 +788,50 @@ public class QueryStack { if (targetParamDefinition == null) { throw new InvalidRequestException("Unknown search parameter name: " + theSearchParam.getName() + "."); } - + // 3. create the query Condition containedCondition = null; switch (targetParamDefinition.getParamType()) { - case DATE: - containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition, + case DATE: + containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequestPartitionId); - break; - case NUMBER: - containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case NUMBER: + containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequestPartitionId); - break; - case QUANTITY: - containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case QUANTITY: + containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequestPartitionId); - break; - case STRING: - containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case STRING: + containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequestPartitionId); - break; - case TOKEN: - containedCondition = createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case TOKEN: + containedCondition = 
createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequestPartitionId); - break; - case COMPOSITE: - containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case COMPOSITE: + containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theRequestPartitionId); - break; - case URI: - containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition, + break; + case URI: + containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition, orValues, theOperation, theRequest, theRequestPartitionId); - break; - default: - throw new InvalidRequestException( + break; + case HAS: + case REFERENCE: + case SPECIAL: + default: + throw new InvalidRequestException( "The search type:" + targetParamDefinition.getParamType() + " is not supported."); } return containedCondition; } - + @Nullable public Condition createPredicateResourceId(@Nullable DbColumn theSourceJoinColumn, List> theValues, String theResourceName, SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { ResourceIdPredicateBuilder builder = mySqlBuilder.newResourceIdBuilder(); @@ -861,11 +875,11 @@ public class QueryStack { } public Condition createPredicateString(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); - + StringPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.STRING, 
theSourceJoinColumn, paramName, () -> mySqlBuilder.addStringPredicateBuilder(theSourceJoinColumn)).getResult(); if (theList.get(0).getMissing() != null) { @@ -971,8 +985,8 @@ public class QueryStack { } public Condition createPredicateToken(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { List tokens = new ArrayList<>(); for (IQueryParameterType nextOr : theList) { @@ -1011,7 +1025,7 @@ public class QueryStack { if (tokens.isEmpty()) { return null; } - + String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); TokenPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult(); @@ -1025,12 +1039,12 @@ public class QueryStack { } public Condition createPredicateUri(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId) { + String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, + SearchFilterParser.CompareOperation theOperation, RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); - + UriPredicateBuilder join = mySqlBuilder.addUriPredicateBuilder(theSourceJoinColumn); if (theList.get(0).getMissing() != null) { @@ -1110,7 +1124,7 @@ public class QueryStack { for (List nextAnd : theAndOrParams) { if 
(theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)); - else + else andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId)); } break; @@ -1204,6 +1218,46 @@ public class QueryStack { mySqlBuilder.addPredicate(predicate); } + private IQueryParameterType toParameterType(RuntimeSearchParam theParam) { + + IQueryParameterType qp; + switch (theParam.getParamType()) { + case DATE: + qp = new DateParam(); + break; + case NUMBER: + qp = new NumberParam(); + break; + case QUANTITY: + qp = new QuantityParam(); + break; + case STRING: + qp = new StringParam(); + break; + case TOKEN: + qp = new TokenParam(); + break; + case COMPOSITE: + List compositeOf = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); + if (compositeOf.size() != 2) { + throw new InternalErrorException("Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. 
Don't know how handlt this."); + } + IQueryParameterType leftParam = toParameterType(compositeOf.get(0)); + IQueryParameterType rightParam = toParameterType(compositeOf.get(1)); + qp = new CompositeParam<>(leftParam, rightParam); + break; + case URI: + qp = new UriParam(); + break; + case HAS: + case REFERENCE: + case SPECIAL: + default: + throw new InvalidRequestException("The search type: " + theParam.getParamType() + " is not supported."); + } + return qp; + } + private enum PredicateBuilderTypeEnum { DATE, COORDS, NUMBER, QUANTITY, REFERENCE, SOURCE, STRING, TOKEN, TAG } @@ -1343,48 +1397,11 @@ public class QueryStack { return parameter.substring(parameter.indexOf(".") + 1); } - private IQueryParameterType toParameterType(RuntimeSearchParam theParam) { - - IQueryParameterType qp; - switch (theParam.getParamType()) { - case DATE: - qp = new DateParam(); - break; - case NUMBER: - qp = new NumberParam(); - break; - case QUANTITY: - qp = new QuantityParam(); - break; - case STRING: - qp = new StringParam(); - break; - case TOKEN: - qp = new TokenParam(); - break; - case COMPOSITE: - List compositeOf = theParam.getCompositeOf(); - if (compositeOf.size() != 2) { - throw new InternalErrorException("Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. Don't know how handlt this."); - } - IQueryParameterType leftParam = toParameterType(compositeOf.get(0)); - IQueryParameterType rightParam = toParameterType(compositeOf.get(1)); - qp = new CompositeParam<>(leftParam, rightParam); - break; - case URI: - qp = new UriParam(); - break; - default: - throw new InvalidRequestException("The search type: " + theParam.getParamType() + " is not supported."); - } - return qp; - } - public static String getParamNameWithPrefix(String theSpnamePrefix, String theParamName) { - + if (isBlank(theSpnamePrefix)) return theParamName; - + return theSpnamePrefix + "." 
+ theParamName; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index db28da9ce88..fde8a54dc16 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -55,9 +55,9 @@ import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; import ca.uhn.fhir.rest.api.SearchContainedModeEnum; @@ -516,8 +516,7 @@ public class SearchBuilder implements ISearchBuilder { } else { - RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(myResourceName); - RuntimeSearchParam param = mySearchParamRegistry.getSearchParamByName(resourceDef, theSort.getParamName()); + RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName()); if (param == null) { String msg = myContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSortParameter", theSort.getParamName(), getResourceName(), mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName())); throw new InvalidRequestException(msg); @@ -546,17 +545,17 @@ public class SearchBuilder implements ISearchBuilder { theQueryStack.addSortOnQuantity(myResourceName, 
theSort.getParamName(), ascending); break; case COMPOSITE: - List compositList = param.getCompositeOf(); - if (compositList == null) { + List compositeList = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); + if (compositeList == null) { throw new InvalidRequestException("The composite _sort parameter " + theSort.getParamName() + " is not defined by the resource " + myResourceName); } - if (compositList.size() != 2) { + if (compositeList.size() != 2) { throw new InvalidRequestException("The composite _sort parameter " + theSort.getParamName() + " must have 2 composite types declared in parameter annotation, found " - + compositList.size()); + + compositeList.size()); } - RuntimeSearchParam left = compositList.get(0); - RuntimeSearchParam right = compositList.get(1); + RuntimeSearchParam left = compositeList.get(0); + RuntimeSearchParam right = compositeList.get(1); createCompositeSort(theQueryStack, myResourceName, left.getParamType(), left.getName(), ascending); createCompositeSort(theQueryStack, myResourceName, right.getParamType(), right.getName(), ascending); @@ -835,7 +834,7 @@ public class SearchBuilder implements ISearchBuilder { String paramName = nextInclude.getParamName(); if (isNotBlank(paramName)) { - param = mySearchParamRegistry.getSearchParamByName(def, paramName); + param = mySearchParamRegistry.getActiveSearchParam(resType, paramName); } else { param = null; } @@ -969,7 +968,7 @@ public class SearchBuilder implements ISearchBuilder { // Since we're going to remove elements below theParams.values().forEach(nextAndList -> ensureSubListsAreWritable(nextAndList)); - List activeUniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(myResourceName, theParams.keySet()); + List activeUniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams(myResourceName, theParams.keySet()); if (activeUniqueSearchParams.size() > 0) { StringBuilder sb = new StringBuilder(); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java index f37a3e428f8..f8af134ad40 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java @@ -43,8 +43,9 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.rest.api.SearchContainedModeEnum; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; @@ -367,7 +368,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder { boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain); RuntimeSearchParam param = null; if (!isMeta) { - param = mySearchParamRegistry.getSearchParamByName(typeDef, chain); + param = mySearchParamRegistry.getActiveSearchParam(nextType, chain); if (param == null) { ourLog.debug("Type {} doesn't have search param {}", nextType, param); continue; @@ -431,8 +432,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder { } if (resourceTypes.isEmpty()) { - RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(theResourceName); - RuntimeSearchParam searchParamByName = mySearchParamRegistry.getSearchParamByName(resourceDef, theParamName); + RuntimeSearchParam searchParamByName = 
mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); if (searchParamByName == null) { throw new InternalErrorException("Could not find parameter " + theParamName); } @@ -495,8 +495,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder { } public List createResourceLinkPaths(String theResourceName, String theParamName) { - RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(theResourceName); - RuntimeSearchParam param = mySearchParamRegistry.getSearchParamByName(resourceDef, theParamName); + RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); List path = param.getPathsSplit(); /* @@ -558,7 +557,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder { qp = new TokenParam(); break; case COMPOSITE: - List compositeOf = theParam.getCompositeOf(); + List compositeOf = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); if (compositeOf.size() != 2) { throw new InternalErrorException("Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. 
Don't know how handlt this."); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 937f0dfb835..b4a2fb9ee0d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -36,7 +36,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index 8d6007c492e..8f22d6dda97 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -25,10 +25,9 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; -import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import 
ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.model.dstu2.resource.Bundle; @@ -50,7 +49,6 @@ import org.apache.commons.io.IOUtils; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.SessionFactory; -import org.hibernate.jdbc.Work; import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; import org.hibernate.search.engine.cfg.BackendSettings; @@ -71,7 +69,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.data.domain.Pageable; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.test.context.TestPropertySource; import org.springframework.transaction.PlatformTransactionManager; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java index 87ad9c1383b..483e0874f02 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java @@ -17,12 +17,12 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao; import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import 
ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader; import ca.uhn.fhir.jpa.util.ResourceCountCache; @@ -60,12 +60,10 @@ import ca.uhn.fhir.model.dstu2.resource.ValueSet; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; -import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; import org.hibernate.search.mapper.orm.Search; import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; @@ -82,6 +80,7 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.persistence.EntityManager; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import static org.junit.jupiter.api.Assertions.fail; @@ -94,6 +93,8 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest { @Autowired protected ISearchParamRegistry mySearchParamRegistry; @Autowired + protected ISearchParamRegistryController mySearchParamRegistryController; + @Autowired protected ApplicationContext myAppCtx; @Autowired protected IResourceReindexingSvc myResourceReindexingSvc; @@ -267,7 +268,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest { if (stream == null) { fail("Unable to load resource: " + resourceName); } - String string = IOUtils.toString(stream, "UTF-8"); + String string = IOUtils.toString(stream, StandardCharsets.UTF_8); IParser newJsonParser = EncodingEnum.detectEncodingNoDefault(string).newParser(myFhirCtx); return 
newJsonParser.parseResource(type, string); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java index 0bdaad0cc29..a0edf4f6e2e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java @@ -254,7 +254,7 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu mySearchParameterDao.update(numberParameter); // This fires every 10 seconds - mySearchParamRegistry.refreshCacheIfNecessary(); + mySearchParamRegistryController.refreshCacheIfNecessary(); Patient patient = new Patient(); patient.setId("future-appointment-count-pt"); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java index 9f86a56cde1..109a59cdad0 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java @@ -33,13 +33,12 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import 
ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java index 68ba66f1096..bda399dbd76 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java @@ -5,7 +5,7 @@ import ca.uhn.fhir.context.phonetic.PhoneticEncoderEnum; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.util.HapiExtensions; import org.apache.commons.codec.language.Soundex; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java index 1287cc05e15..a5fd819663b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java @@ -1605,7 +1605,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test verify(interceptor, times(1)).invoke(any(), paramsCaptor.capture()); 
StorageProcessingMessage msg = paramsCaptor.getValue().get(StorageProcessingMessage.class); - assertThat(msg.getMessage(), containsString("refers to unknown component foo, ignoring this parameter")); + assertThat(msg.getMessage(), containsString("ignoring this parameter")); } finally { myInterceptorRegistry.unregisterInterceptor(interceptor); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java index 252df5cfdd2..a35337b8a16 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java @@ -48,7 +48,7 @@ import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java index 8ac8333c6c0..4f0eb913e03 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java @@ -13,7 +13,7 @@ import ca.uhn.fhir.jpa.dao.BaseJpaTest; import 
ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.parser.IParser; @@ -23,7 +23,6 @@ import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParamModifier; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -43,7 +42,6 @@ import org.hl7.fhir.r4.model.Meta; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Organization; import org.hl7.fhir.r4.model.ValueSet; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java index 751b0696713..c5ba2df8890 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java @@ -14,7 +14,7 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import 
ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java index f767a0c58ab..7f9fc10925a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.dao.r4; +import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -10,9 +11,9 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.util.SpringObjectCaster; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.DateParam; @@ -23,11 +24,9 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.HapiExtensions; -import ca.uhn.fhir.util.TestUtil; import 
org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.Enumerations.PublicationStatus; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -707,7 +706,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test { ResourceReindexingSvcImpl svc = SpringObjectCaster.getTargetObject(myResourceReindexingSvc, ResourceReindexingSvcImpl.class); svc.initExecutor(); - List uniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams("Observation"); + List uniqueSearchParams = mySearchParamRegistry.getActiveUniqueSearchParams("Observation"); assertEquals(0, uniqueSearchParams.size()); Patient pt1 = new Patient(); @@ -1487,14 +1486,16 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test { @Test public void testDetectUniqueSearchParams() { createUniqueBirthdateAndGenderSps(); - List params = mySearchParamRegistry.getActiveUniqueSearchParams("Patient"); + List params = mySearchParamRegistry.getActiveUniqueSearchParams("Patient"); assertEquals(1, params.size()); assertEquals(params.get(0).isUnique(), true); - assertEquals(2, params.get(0).getCompositeOf().size()); + assertEquals(2, params.get(0).getComponents().size()); + // Should be alphabetical order - assertEquals("birthdate", params.get(0).getCompositeOf().get(0).getName()); - assertEquals("gender", params.get(0).getCompositeOf().get(1).getName()); + List compositeParams = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, params.get(0)); + assertEquals("birthdate", compositeParams.get(0).getName()); + assertEquals("gender", compositeParams.get(1).getName()); } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java index 926a58716dd..89a2dfbbfbf 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java @@ -15,13 +15,13 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor; import ca.uhn.fhir.jpa.searchparam.extractor.PathAndRef; import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR4; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; import ca.uhn.fhir.jpa.searchparam.registry.ReadOnlySearchParamCache; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.HapiExtensions; import com.google.common.collect.Sets; import org.hl7.fhir.r4.model.BooleanType; @@ -37,6 +37,7 @@ import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.SearchParameter; import org.hl7.fhir.r4.model.StringType; +import org.jetbrains.annotations.Nullable; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; @@ -44,7 +45,6 @@ import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -327,7 +327,7 @@ public class SearchParamExtractorR4Test { public void testExtensionContainingReference() { String path = "Patient.extension('http://patext').value.as(Reference)"; - RuntimeSearchParam sp = new RuntimeSearchParam("extpat", "Patient SP", path, 
RestSearchParameterTypeEnum.REFERENCE, new HashSet<>(), Sets.newHashSet("Patient"), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); + RuntimeSearchParam sp = new RuntimeSearchParam(null, null, "extpat", "Patient SP", path, RestSearchParameterTypeEnum.REFERENCE, new HashSet<>(), Sets.newHashSet("Patient"), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null); mySearchParamRegistry.addSearchParam(sp); Patient patient = new Patient(); @@ -395,7 +395,7 @@ public class SearchParamExtractorR4Test { assertEquals(2, list.size()); } - private static class MySearchParamRegistry implements ISearchParamRegistry { + private static class MySearchParamRegistry implements ISearchParamRegistry, ISearchParamRegistryController { private List myExtraSearchParams = new ArrayList<>(); @@ -416,7 +416,6 @@ public class SearchParamExtractorR4Test { return new ResourceChangeResult(); } - @Override public ReadOnlySearchParamCache getActiveSearchParams() { throw new UnsupportedOperationException(); } @@ -441,12 +440,18 @@ public class SearchParamExtractorR4Test { } @Override - public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { throw new UnsupportedOperationException(); } @Override - public List getActiveUniqueSearchParams(String theResourceName) { + public List getActiveUniqueSearchParams(String theResourceName) { throw new UnsupportedOperationException(); } @@ -455,16 +460,6 @@ public class SearchParamExtractorR4Test { // nothing } - @Override - public RuntimeSearchParam getSearchParamByName(RuntimeResourceDefinition theResourceDef, String theParamName) { - return null; - } - - @Override - public Collection getSearchParamsByResourceType(RuntimeResourceDefinition theResourceDef) { - return null; - } 
- @Override public void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { // nothing diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java index 3e7b50b22d7..7f657a06266 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java @@ -189,7 +189,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test { ourClient.update().resource(numberParameter).execute(); // This fires every 10 seconds - mySearchParamRegistry.refreshCacheIfNecessary(); + mySearchParamRegistryController.refreshCacheIfNecessary(); Patient patient = new Patient(); patient.setId("future-appointment-count-pt"); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java index 991de4e3eca..ed39b28723b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java @@ -393,7 +393,9 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide fooSp.setStatus(org.hl7.fhir.r4.model.Enumerations.PublicationStatus.ACTIVE); mySearchParameterDao.create(fooSp, mySrd); + myCaptureQueriesListener.clear(); mySearchParamRegistry.forceRefresh(); + myCaptureQueriesListener.logAllQueriesForCurrentThread(); Patient pat = new Patient(); pat.setGender(AdministrativeGender.MALE); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java index 36e6ad17a72..e79994980ee 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java @@ -629,7 +629,7 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test { try (CloseableHttpResponse response = ourHttpClient.execute(get)) { String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(resp); + ourLog.info("Response was: {}", resp); Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp); ids = toUnqualifiedVersionlessIdValues(bundle); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java index 3ecd9e7ad78..66fc2d75e8c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java @@ -13,11 +13,10 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; diff --git 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/searchparam/MatchUrlServiceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/searchparam/MatchUrlServiceTest.java index a060380f371..5b47c24a5be 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/searchparam/MatchUrlServiceTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/searchparam/MatchUrlServiceTest.java @@ -5,14 +5,12 @@ import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.config.TestDstu3Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.param.ReferenceParam; -import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.dstu3.model.Condition; import org.hl7.fhir.dstu3.model.Location; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; @@ -41,7 +39,7 @@ public class MatchUrlServiceTest extends BaseJpaTest { public void testTranslateMatchUrl() { RuntimeResourceDefinition resourceDef = ourCtx.getResourceDefinition(Condition.class); ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class); - when(searchParamRegistry.getSearchParamByName(any(RuntimeResourceDefinition.class), eq("patient"))).thenReturn(resourceDef.getSearchParam("patient")); + when(searchParamRegistry.getActiveSearchParam(any(), eq("patient"))).thenReturn(resourceDef.getSearchParam("patient")); SearchParameterMap match = myMatchUrlService.translateMatchUrl("Condition?patient=304&_lastUpdated=>2011-01-01T11:12:21.0000Z", resourceDef); assertEquals("2011-01-01T11:12:21.0000Z", 
match.getLastUpdated().getLowerBound().getValueAsString()); assertEquals(ReferenceParam.class, match.get("patient").get(0).get(0).getClass()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java index 84f0f663b9c..23f79370f69 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java @@ -15,7 +15,7 @@ import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4; diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index 4a74a2fe955..781f0e34231 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml index 8c132ab6a8c..d7df70c1bea 100644 --- a/hapi-fhir-jpaserver-cql/pom.xml +++ b/hapi-fhir-jpaserver-cql/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -144,13 +144,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT test diff 
--git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 4f63e20b3f2..855dfe0501a 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -55,13 +55,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java index 204b27e69f1..97b2aeefad2 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java @@ -64,7 +64,7 @@ import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmGoldenResourceFindingSvc; import ca.uhn.fhir.jpa.mdm.svc.candidate.FindCandidateByEidSvc; import ca.uhn.fhir.jpa.mdm.svc.candidate.FindCandidateByLinkSvc; import ca.uhn.fhir.jpa.mdm.svc.candidate.FindCandidateByExampleSvc; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.validation.IResourceLoader; import org.slf4j.Logger; import org.springframework.context.annotation.Bean; @@ -153,7 +153,7 @@ public class MdmConsumerConfig { } @Bean - MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRetriever theSearchParamRetriever) { + MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRegistry theSearchParamRetriever) { return new MdmRuleValidator(theFhirContext, theSearchParamRetriever); } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubmitterConfig.java 
b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubmitterConfig.java index dd840b3189d..be9bb320ecd 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubmitterConfig.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubmitterConfig.java @@ -32,7 +32,7 @@ import ca.uhn.fhir.jpa.mdm.svc.MdmGoldenResourceDeletingSvc; import ca.uhn.fhir.jpa.mdm.svc.MdmSearchParamSvc; import ca.uhn.fhir.jpa.mdm.svc.MdmSubmitSvcImpl; import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; @@ -51,7 +51,7 @@ public class MdmSubmitterConfig { } @Bean - MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRetriever theSearchParamRetriever) { + MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRegistry theSearchParamRetriever) { return new MdmRuleValidator(theFhirContext, theSearchParamRetriever); } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSearchParamSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSearchParamSvc.java index 4d8184c8a5f..944819c3163 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSearchParamSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSearchParamSvc.java @@ -30,7 +30,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorService; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import 
org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml index 1dead963b6a..767f5422571 100644 --- a/hapi-fhir-jpaserver-migrate/pom.xml +++ b/hapi-fhir-jpaserver-migrate/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index cbefeedd228..fc6b066de31 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 73b19e1e190..23fa53e26a7 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java index d5cd43e6048..c197da32f1e 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java @@ -72,6 +72,7 @@ public class ResourceChangeEvent implements IResourceChangeEvent { return myDeletedResourceIds; } + @Override public boolean isEmpty() { return myCreatedResourceIds.isEmpty() && myUpdatedResourceIds.isEmpty() && myDeletedResourceIds.isEmpty(); } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/JpaRuntimeSearchParam.java 
b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/JpaRuntimeSearchParam.java deleted file mode 100644 index 8dd99d34106..00000000000 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/JpaRuntimeSearchParam.java +++ /dev/null @@ -1,88 +0,0 @@ -package ca.uhn.fhir.jpa.searchparam; - -/*- - * #%L - * HAPI FHIR Search Parameters - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; -import org.hl7.fhir.instance.model.api.IBaseReference; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -public class JpaRuntimeSearchParam extends RuntimeSearchParam { - - private final boolean myUnique; - private final List myComponents; - - /** - * Constructor - */ - public JpaRuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, boolean theUnique, List theComponents, Collection theBase) { - super(theId, theUri, theName, theDescription, thePath, theParamType, createCompositeList(theParamType), theProvidesMembershipInCompartments, theTargets, theStatus, theBase); - myUnique = theUnique; - myComponents = Collections.unmodifiableList(theComponents); - } - - public List getComponents() { - return myComponents; - } - - public boolean isUnique() { - return myUnique; - } - - public static class Component { - private final String myExpression; - private final IBaseReference myReference; - - public Component(String theExpression, IBaseReference theReference) { - myExpression = theExpression; - myReference = theReference; - - } - - public String getExpression() { - return myExpression; - } - - public IBaseReference getReference() { - return myReference; - } - } - - private static ArrayList createCompositeList(RestSearchParameterTypeEnum theParamType) { - if (theParamType == RestSearchParameterTypeEnum.COMPOSITE) { - return new ArrayList<>(); - } else { - return null; - } - } - - -} 
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java index 1927b4e9b6e..d671c3cb870 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java @@ -24,7 +24,8 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.Include; @@ -99,7 +100,7 @@ public class MatchUrlService { } } } else if (Constants.PARAM_HAS.equals(nextParamName)) { - IQueryParameterAnd param = ParameterUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.HAS, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.HAS, nextParamName, paramList); paramMap.add(nextParamName, param); } else if (Constants.PARAM_COUNT.equals(nextParamName)) { if (paramList != null && paramList.size() > 0 && paramList.get(0).size() > 0) { @@ -127,23 +128,23 @@ public class MatchUrlService { type.setValuesAsQueryTokens(myContext, nextParamName, (paramList)); paramMap.add(nextParamName, type); } else if (Constants.PARAM_SOURCE.equals(nextParamName)) { - IQueryParameterAnd param = ParameterUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); 
paramMap.add(nextParamName, param); } else if (JpaConstants.PARAM_DELETE_EXPUNGE.equals(nextParamName)) { paramMap.setDeleteExpunge(true); } else if (Constants.PARAM_LIST.equals(nextParamName)) { - IQueryParameterAnd param = ParameterUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); paramMap.add(nextParamName, param); } else if (nextParamName.startsWith("_")) { // ignore these since they aren't search params (e.g. _sort) } else { - RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(theResourceDefinition, nextParamName); + RuntimeSearchParam paramDef = mySearchParamRegistry.getActiveSearchParam(theResourceDefinition.getName(), nextParamName); if (paramDef == null) { throw new InvalidRequestException( "Failed to parse match URL[" + theMatchUrl + "] - Resource type " + theResourceDefinition.getName() + " does not have a parameter with name: " + nextParamName); } - IQueryParameterAnd param = ParameterUtil.parseQueryParams(myContext, paramDef, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams(mySearchParamRegistry, myContext, paramDef, nextParamName, paramList); paramMap.add(nextParamName, param); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java index dc0951d6458..d006efab38d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java @@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorService; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import 
ca.uhn.fhir.jpa.searchparam.matcher.IndexedSearchParamExtractor; import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.searchparam.registry.SearchParameterCanonicalizer; import org.springframework.beans.factory.annotation.Autowired; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java index 82b2ffc848d..45f9c3fbccd 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java @@ -41,7 +41,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; import ca.uhn.fhir.jpa.searchparam.SearchParamConstants; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.model.primitive.BoundCodeDt; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java index 70cba214b70..798a35b3f52 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java +++ 
b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.searchparam.extractor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.model.dstu2.composite.ContactPointDt; import ca.uhn.fhir.util.FhirTerser; import org.hl7.fhir.instance.model.api.IBase; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java index e92e62dc9d9..f701a081ad6 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.searchparam.extractor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.dstu3.context.IWorkerContext; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java index 5e61663ab01..c5289ad152f 100644 --- 
a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.searchparam.extractor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.PathEngineException; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java index 38e6b082f12..62498092baf 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.searchparam.extractor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBaseResource; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java index 
cb5dd573f44..7f18e809b5d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java @@ -43,18 +43,15 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.FhirTerser; -import ca.uhn.fhir.util.ResourceReferenceInfo; -import ca.uhn.fhir.util.StringUtil; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.StringUtils; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java index 984989ca477..0978dd1f355 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java @@ -27,7 +27,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import 
ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.SourceParam; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.Constants; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java deleted file mode 100644 index 7f8dbf07ffd..00000000000 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java +++ /dev/null @@ -1,77 +0,0 @@ -package ca.uhn.fhir.jpa.searchparam.registry; - -/* - * #%L - * HAPI FHIR Search Parameters - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.context.phonetic.IPhoneticEncoder; -import ca.uhn.fhir.jpa.cache.ResourceChangeResult; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; - -import java.util.Collection; -import java.util.List; -import java.util.Set; - -public interface ISearchParamRegistry extends ISearchParamRetriever { - - /** - * Request that the cache be refreshed now, in the current thread - */ - void forceRefresh(); - - /** - * @return the number of search parameter entries changed - */ - ResourceChangeResult refreshCacheIfNecessary(); - - ReadOnlySearchParamCache getActiveSearchParams(); - - List getActiveUniqueSearchParams(String theResourceName, Set theParamNames); - - List getActiveUniqueSearchParams(String theResourceName); - - /** - * Request that the cache be refreshed at the next convenient time (in a different thread) - */ - void requestRefresh(); - - RuntimeSearchParam getSearchParamByName(RuntimeResourceDefinition theResourceDef, String theParamName); - - Collection getSearchParamsByResourceType(RuntimeResourceDefinition theResourceDef); - - /** - * When indexing a HumanName, if a StringEncoder is set in the context, then the "phonetic" search parameter will normalize - * the String using this encoder. - * - * @since 5.1.0 - */ - void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder); - - /** - * Returns a collection containing all of the valid active search parameters. This method is intended for - * creating error messages for users as opposed to actual search processing. It will include meta parameters - * such as _id and _lastUpdated. 
- */ - default Collection getValidSearchParameterNamesIncludingMeta(String theResourceName) { - return getActiveSearchParams().getValidSearchParameterNamesIncludingMeta(theResourceName); - } -} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRetriever.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java similarity index 52% rename from hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRetriever.java rename to hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java index a1b0a6b2a2c..d0c432dc8fa 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRetriever.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java @@ -1,8 +1,8 @@ -package ca.uhn.fhir.rest.server.util; +package ca.uhn.fhir.jpa.searchparam.registry; /*- * #%L - * HAPI FHIR - Server Framework + * HAPI FHIR Search Parameters * %% * Copyright (C) 2014 - 2021 Smile CDR, Inc. * %% @@ -18,25 +18,12 @@ package ca.uhn.fhir.rest.server.util; * See the License for the specific language governing permissions and * limitations under the License. 
* #L% - */ - -import ca.uhn.fhir.context.RuntimeSearchParam; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.util.Map; - -public interface ISearchParamRetriever { - /** - * @return Returns {@literal null} if no match */ - @Nullable - RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName); - /** - * @return Returns all active search params for the given resource - */ - @Nonnull - Map getActiveSearchParams(String theResourceName); +import ca.uhn.fhir.jpa.cache.ResourceChangeResult; + +public interface ISearchParamRegistryController { + + ResourceChangeResult refreshCacheIfNecessary(); } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java index a2b054295f8..e529bdcf982 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java @@ -26,10 +26,8 @@ import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,29 +39,32 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + public class JpaSearchParamCache { private static final Logger ourLog = 
LoggerFactory.getLogger(JpaSearchParamCache.class); - private volatile Map> myActiveUniqueSearchParams = Collections.emptyMap(); - private volatile Map, List>> myActiveParamNamesToUniqueSearchParams = Collections.emptyMap(); + private volatile Map> myActiveUniqueSearchParams = Collections.emptyMap(); + private volatile Map, List>> myActiveParamNamesToUniqueSearchParams = Collections.emptyMap(); - public List getActiveUniqueSearchParams(String theResourceName) { - List retval = myActiveUniqueSearchParams.get(theResourceName); + public List getActiveUniqueSearchParams(String theResourceName) { + List retval = myActiveUniqueSearchParams.get(theResourceName); if (retval == null) { retval = Collections.emptyList(); } return retval; } - public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { - Map, List> paramNamesToParams = myActiveParamNamesToUniqueSearchParams.get(theResourceName); + public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + Map, List> paramNamesToParams = myActiveParamNamesToUniqueSearchParams.get(theResourceName); if (paramNamesToParams == null) { return Collections.emptyList(); } - List retVal = paramNamesToParams.get(theParamNames); + List retVal = paramNamesToParams.get(theParamNames); if (retVal == null) { retVal = Collections.emptyList(); } @@ -71,18 +72,18 @@ public class JpaSearchParamCache { } void populateActiveSearchParams(IInterceptorService theInterceptorBroadcaster, IPhoneticEncoder theDefaultPhoneticEncoder, RuntimeSearchParamCache theActiveSearchParams) { - Map> activeUniqueSearchParams = new HashMap<>(); - Map, List>> activeParamNamesToUniqueSearchParams = new HashMap<>(); + Map> activeUniqueSearchParams = new HashMap<>(); + Map, List>> activeParamNamesToUniqueSearchParams = new HashMap<>(); Map idToRuntimeSearchParam = new HashMap<>(); - List jpaSearchParams = new ArrayList<>(); + List jpaSearchParams = new ArrayList<>(); /* * Loop through parameters and find JPA params */ for 
(String theResourceName : theActiveSearchParams.getResourceNameKeys()) { Map searchParamMap = theActiveSearchParams.getSearchParamMap(theResourceName); - List uniqueSearchParams = activeUniqueSearchParams.computeIfAbsent(theResourceName, k -> new ArrayList<>()); + List uniqueSearchParams = activeUniqueSearchParams.computeIfAbsent(theResourceName, k -> new ArrayList<>()); Collection nextSearchParamsForResourceName = searchParamMap.values(); ourLog.trace("Resource {} has {} params", theResourceName, searchParamMap.size()); @@ -94,13 +95,14 @@ public class JpaSearchParamCache { if (nextCandidate.getId() != null) { idToRuntimeSearchParam.put(nextCandidate.getId().toUnqualifiedVersionless().getValue(), nextCandidate); } + if (isNotBlank(nextCandidate.getUri())) { + idToRuntimeSearchParam.put(nextCandidate.getUri(), nextCandidate); + } - if (nextCandidate instanceof JpaRuntimeSearchParam) { - JpaRuntimeSearchParam nextCandidateCasted = (JpaRuntimeSearchParam) nextCandidate; - jpaSearchParams.add(nextCandidateCasted); - if (nextCandidateCasted.isUnique()) { - uniqueSearchParams.add(nextCandidateCasted); - } + RuntimeSearchParam nextCandidateCasted = nextCandidate; + jpaSearchParams.add(nextCandidateCasted); + if (nextCandidateCasted.isUnique()) { + uniqueSearchParams.add(nextCandidateCasted); } setPhoneticEncoder(theDefaultPhoneticEncoder, nextCandidate); @@ -111,25 +113,19 @@ public class JpaSearchParamCache { ourLog.trace("Have {} search params loaded", idToRuntimeSearchParam.size()); Set haveSeen = new HashSet<>(); - for (JpaRuntimeSearchParam next : jpaSearchParams) { - if (!haveSeen.add(next.getId().toUnqualifiedVersionless().getValue())) { + for (RuntimeSearchParam next : jpaSearchParams) { + if (next.getId() != null && !haveSeen.add(next.getId().toUnqualifiedVersionless().getValue())) { continue; } - Set paramNames = new HashSet<>(); - for (JpaRuntimeSearchParam.Component nextComponent : next.getComponents()) { - String nextRef = 
nextComponent.getReference().getReferenceElement().toUnqualifiedVersionless().getValue(); + Set paramNames = new TreeSet<>(); + for (RuntimeSearchParam.Component nextComponent : next.getComponents()) { + String nextRef = nextComponent.getReference(); RuntimeSearchParam componentTarget = idToRuntimeSearchParam.get(nextRef); if (componentTarget != null) { - next.getCompositeOf().add(componentTarget); paramNames.add(componentTarget.getName()); } else { - String existingParams = idToRuntimeSearchParam - .keySet() - .stream() - .sorted() - .collect(Collectors.joining(", ")); - String message = "Search parameter " + next.getId().toUnqualifiedVersionless().getValue() + " refers to unknown component " + nextRef + ", ignoring this parameter (valid values: " + existingParams + ")"; + String message = "Search parameter " + next + " refers to unknown component " + nextRef + ", ignoring this parameter"; ourLog.warn(message); // Interceptor broadcast: JPA_PERFTRACE_WARNING @@ -141,8 +137,7 @@ public class JpaSearchParamCache { } } - if (next.getCompositeOf() != null) { - next.getCompositeOf().sort((theO1, theO2) -> StringUtils.compare(theO1.getName(), theO2.getName())); + if (next.isUnique()) { for (String nextBase : next.getBase()) { activeParamNamesToUniqueSearchParams.computeIfAbsent(nextBase, v -> new HashMap<>()); activeParamNamesToUniqueSearchParams.get(nextBase).computeIfAbsent(paramNames, t -> new ArrayList<>()); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java index 2fd7748dce6..d173693aae8 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java @@ -23,30 +23,53 @@ package 
ca.uhn.fhir.jpa.searchparam.registry; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.rest.api.Constants; -import org.hl7.fhir.instance.model.api.IAnyResource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; -import java.util.TreeSet; import java.util.stream.Stream; public class ReadOnlySearchParamCache { - private static final Logger ourLog = LoggerFactory.getLogger(ReadOnlySearchParamCache.class); - // resourceName -> searchParamName -> searchparam - protected final Map> myMap; + // resourceName -> searchParamName -> searchparam + protected final Map> myResourceNameToSpNameToSp; + protected final Map myUrlToParam; + + /** + * Constructor + */ ReadOnlySearchParamCache() { - myMap = new HashMap<>(); + myResourceNameToSpNameToSp = new HashMap<>(); + myUrlToParam = new HashMap<>(); } + /** + * Copy constructor + */ private ReadOnlySearchParamCache(RuntimeSearchParamCache theRuntimeSearchParamCache) { - myMap = theRuntimeSearchParamCache.myMap; + myResourceNameToSpNameToSp = theRuntimeSearchParamCache.myResourceNameToSpNameToSp; + myUrlToParam = theRuntimeSearchParamCache.myUrlToParam; + } + + public Stream getSearchParamStream() { + return myResourceNameToSpNameToSp.values().stream().flatMap(entry -> entry.values().stream()); + } + + protected Map getSearchParamMap(String theResourceName) { + Map retval = myResourceNameToSpNameToSp.get(theResourceName); + if (retval == null) { + return Collections.emptyMap(); + } + return Collections.unmodifiableMap(myResourceNameToSpNameToSp.get(theResourceName)); + } + + public int size() { + return myResourceNameToSpNameToSp.size(); + } + + public RuntimeSearchParam getByUrl(String theUrl) { + return myUrlToParam.get(theUrl); } public static ReadOnlySearchParamCache 
fromFhirContext(FhirContext theFhirContext) { @@ -58,7 +81,7 @@ public class ReadOnlySearchParamCache { RuntimeResourceDefinition nextResDef = theFhirContext.getResourceDefinition(resourceName); String nextResourceName = nextResDef.getName(); HashMap nameToParam = new HashMap<>(); - retval.myMap.put(nextResourceName, nameToParam); + retval.myResourceNameToSpNameToSp.put(nextResourceName, nameToParam); for (RuntimeSearchParam nextSp : nextResDef.getSearchParams()) { nameToParam.put(nextSp.getName(), nextSp); @@ -70,33 +93,4 @@ public class ReadOnlySearchParamCache { public static ReadOnlySearchParamCache fromRuntimeSearchParamCache(RuntimeSearchParamCache theRuntimeSearchParamCache) { return new ReadOnlySearchParamCache(theRuntimeSearchParamCache); } - - public Stream getSearchParamStream() { - return myMap.values().stream().flatMap(entry -> entry.values().stream()); - } - - protected Map getSearchParamMap(String theResourceName) { - Map retval = myMap.get(theResourceName); - if (retval == null) { - return Collections.emptyMap(); - } - return Collections.unmodifiableMap(myMap.get(theResourceName)); - } - - public Collection getValidSearchParameterNamesIncludingMeta(String theResourceName) { - TreeSet retval; - Map searchParamMap = myMap.get(theResourceName); - if (searchParamMap == null) { - retval = new TreeSet<>(); - } else { - retval = new TreeSet<>(searchParamMap.keySet()); - } - retval.add(IAnyResource.SP_RES_ID); - retval.add(Constants.PARAM_LASTUPDATED); - return retval; - } - - public int size() { - return myMap.size(); - } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java index 456a38a933f..5b431fa56e1 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java +++ 
b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java @@ -28,35 +28,42 @@ import java.util.HashMap; import java.util.Map; import java.util.Set; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { private static final Logger ourLog = LoggerFactory.getLogger(RuntimeSearchParamCache.class); protected RuntimeSearchParamCache() { } - public static RuntimeSearchParamCache fromReadOnlySearchParmCache(ReadOnlySearchParamCache theBuiltInSearchParams) { - RuntimeSearchParamCache retval = new RuntimeSearchParamCache(); - retval.putAll(theBuiltInSearchParams); - return retval; - } - public void add(String theResourceName, String theName, RuntimeSearchParam theSearchParam) { getSearchParamMap(theResourceName).put(theName, theSearchParam); + String uri = theSearchParam.getUri(); + if (isNotBlank(uri)) { + if (myUrlToParam.containsKey(uri)) { + ourLog.warn("Multiple search parameters have URL: {}", uri); + } + myUrlToParam.put(uri, theSearchParam); + } + if (theSearchParam.getId() != null && theSearchParam.getId().hasIdPart()) { + String value = theSearchParam.getId().toUnqualifiedVersionless().getValue(); + myUrlToParam.put(value, theSearchParam); + } } public void remove(String theResourceName, String theName) { - if (!myMap.containsKey(theResourceName)) { + if (!myResourceNameToSpNameToSp.containsKey(theResourceName)) { return; } - myMap.get(theResourceName).remove(theName); + myResourceNameToSpNameToSp.get(theResourceName).remove(theName); } private void putAll(ReadOnlySearchParamCache theReadOnlySearchParamCache) { - Set>> builtInSps = theReadOnlySearchParamCache.myMap.entrySet(); + Set>> builtInSps = theReadOnlySearchParamCache.myResourceNameToSpNameToSp.entrySet(); for (Map.Entry> nextBuiltInEntry : builtInSps) { for (RuntimeSearchParam nextParam : nextBuiltInEntry.getValue().values()) { String nextResourceName = 
nextBuiltInEntry.getKey(); - getSearchParamMap(nextResourceName).put(nextParam.getName(), nextParam); + add(nextResourceName, nextParam.getName(), nextParam); } ourLog.trace("Have {} built-in SPs for: {}", nextBuiltInEntry.getValue().size(), nextBuiltInEntry.getKey()); @@ -65,7 +72,7 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { public RuntimeSearchParam get(String theResourceName, String theParamName) { RuntimeSearchParam retVal = null; - Map params = myMap.get(theResourceName); + Map params = myResourceNameToSpNameToSp.get(theResourceName); if (params != null) { retVal = params.get(theParamName); } @@ -73,11 +80,17 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { } public Set getResourceNameKeys() { - return myMap.keySet(); + return myResourceNameToSpNameToSp.keySet(); } @Override protected Map getSearchParamMap(String theResourceName) { - return myMap.computeIfAbsent(theResourceName, k -> new HashMap<>()); + return myResourceNameToSpNameToSp.computeIfAbsent(theResourceName, k -> new HashMap<>()); + } + + public static RuntimeSearchParamCache fromReadOnlySearchParmCache(ReadOnlySearchParamCache theBuiltInSearchParams) { + RuntimeSearchParamCache retval = new RuntimeSearchParamCache(); + retval.putAll(theBuiltInSearchParams); + return retval; } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java index 7dafdd74610..a6cf42636aa 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.searchparam.registry; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeResourceDefinition; 
import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.context.phonetic.IPhoneticEncoder; import ca.uhn.fhir.interceptor.api.IInterceptorService; @@ -31,10 +30,10 @@ import ca.uhn.fhir.jpa.cache.IResourceChangeListenerCache; import ca.uhn.fhir.jpa.cache.IResourceChangeListenerRegistry; import ca.uhn.fhir.jpa.cache.ResourceChangeResult; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.SearchParameterUtil; import ca.uhn.fhir.util.StopWatch; import com.google.common.annotations.VisibleForTesting; @@ -46,6 +45,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nullable; import javax.annotation.Nonnull; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; @@ -58,11 +58,13 @@ import java.util.Set; import static org.apache.commons.lang3.StringUtils.isBlank; -public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceChangeListener { +public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceChangeListener, ISearchParamRegistryController { + // TODO: JA remove unused? 
+ private static final Logger ourLog = LoggerFactory.getLogger(SearchParamRegistryImpl.class); private static final int MAX_MANAGED_PARAM_COUNT = 10000; private static final long REFRESH_INTERVAL = DateUtils.MILLIS_PER_HOUR; - + private final JpaSearchParamCache myJpaSearchParamCache = new JpaSearchParamCache(); @Autowired private ModelConfig myModelConfig; @Autowired @@ -73,10 +75,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC private SearchParameterCanonicalizer mySearchParameterCanonicalizer; @Autowired private IResourceChangeListenerRegistry myResourceChangeListenerRegistry; - private volatile ReadOnlySearchParamCache myBuiltInSearchParams; private volatile IPhoneticEncoder myPhoneticEncoder; - private final JpaSearchParamCache myJpaSearchParamCache = new JpaSearchParamCache(); private volatile RuntimeSearchParamCache myActiveSearchParams; @Autowired @@ -109,15 +109,25 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC } @Override - public List getActiveUniqueSearchParams(String theResourceName) { + public List getActiveUniqueSearchParams(String theResourceName) { return myJpaSearchParamCache.getActiveUniqueSearchParams(theResourceName); } @Override - public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { return myJpaSearchParamCache.getActiveUniqueSearchParams(theResourceName, theParamNames); } + @Nullable + @Override + public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { + if (myActiveSearchParams != null) { + return myActiveSearchParams.getByUrl(theUrl); + } else { + return null; + } + } + private void rebuildActiveSearchParams() { ourLog.info("Rebuilding SearchParamRegistry"); SearchParameterMap params = new SearchParameterMap(); @@ -205,26 +215,14 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC continue; } - Map 
searchParamMap = theSearchParams.getSearchParamMap(nextBaseName); String name = runtimeSp.getName(); + theSearchParams.add(nextBaseName, name, runtimeSp); ourLog.debug("Adding search parameter {}.{} to SearchParamRegistry", nextBaseName, StringUtils.defaultString(name, "[composite]")); - searchParamMap.put(name, runtimeSp); retval++; } return retval; } - @Override - public RuntimeSearchParam getSearchParamByName(RuntimeResourceDefinition theResourceDef, String theParamName) { - Map params = getActiveSearchParams(theResourceDef.getName()); - return params.get(theParamName); - } - - @Override - public Collection getSearchParamsByResourceType(RuntimeResourceDefinition theResourceDef) { - return getActiveSearchParams(theResourceDef.getName()).values(); - } - @Override public void requestRefresh() { myResourceChangeListenerCache.requestRefresh(); @@ -255,7 +253,6 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC myResourceChangeListenerRegistry.unregisterResourceResourceChangeListener(this); } - @Override public ReadOnlySearchParamCache getActiveSearchParams() { requiresActiveSearchParams(); if (myActiveSearchParams == null) { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java index d134a8c7ba9..29ec2044f90 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java @@ -23,12 +23,10 @@ package ca.uhn.fhir.jpa.searchparam.registry; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.context.phonetic.PhoneticEncoderEnum; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import 
ca.uhn.fhir.model.api.ExtensionDt; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.DatatypeUtil; -import ca.uhn.fhir.util.ExtensionUtil; import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.HapiExtensions; import org.apache.commons.lang3.EnumUtils; @@ -41,7 +39,6 @@ import org.hl7.fhir.instance.model.api.IBaseHasExtensions; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r4.model.Reference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -57,6 +54,7 @@ import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.startsWith; @Service public class SearchParameterCanonicalizer { @@ -70,7 +68,7 @@ public class SearchParameterCanonicalizer { } protected RuntimeSearchParam canonicalizeSearchParameter(IBaseResource theSearchParameter) { - JpaRuntimeSearchParam retVal; + RuntimeSearchParam retVal; switch (myFhirContext.getVersion().getVersion()) { case DSTU2: retVal = canonicalizeSearchParameterDstu2((ca.uhn.fhir.model.dstu2.resource.SearchParameter) theSearchParameter); @@ -92,7 +90,7 @@ public class SearchParameterCanonicalizer { return retVal; } - private JpaRuntimeSearchParam canonicalizeSearchParameterDstu2(ca.uhn.fhir.model.dstu2.resource.SearchParameter theNextSp) { + private RuntimeSearchParam canonicalizeSearchParameterDstu2(ca.uhn.fhir.model.dstu2.resource.SearchParameter theNextSp) { String name = theNextSp.getCode(); String description = theNextSp.getDescription(); String path = theNextSp.getXpath(); @@ -162,12 +160,12 @@ public class SearchParameterCanonicalizer { } } - List components = 
Collections.emptyList(); + List components = Collections.emptyList(); Collection> base = Collections.singletonList(theNextSp.getBaseElement()); - return new JpaRuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(base)); + return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(base)); } - private JpaRuntimeSearchParam canonicalizeSearchParameterDstu3(org.hl7.fhir.dstu3.model.SearchParameter theNextSp) { + private RuntimeSearchParam canonicalizeSearchParameterDstu3(org.hl7.fhir.dstu3.model.SearchParameter theNextSp) { String name = theNextSp.getCode(); String description = theNextSp.getDescription(); String path = theNextSp.getExpression(); @@ -242,15 +240,15 @@ public class SearchParameterCanonicalizer { } } - List components = new ArrayList<>(); + List components = new ArrayList<>(); for (SearchParameter.SearchParameterComponentComponent next : theNextSp.getComponent()) { - components.add(new JpaRuntimeSearchParam.Component(next.getExpression(), next.getDefinition())); + components.add(new RuntimeSearchParam.Component(next.getExpression(), next.getDefinition().getReferenceElement().toUnqualifiedVersionless().getValue())); } - return new JpaRuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(theNextSp.getBase())); + return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(theNextSp.getBase())); } - private JpaRuntimeSearchParam canonicalizeSearchParameterR4Plus(IBaseResource theNextSp) { + private RuntimeSearchParam canonicalizeSearchParameterR4Plus(IBaseResource theNextSp) { FhirTerser terser = myFhirContext.newTerser(); String name = terser.getSinglePrimitiveValueOrNull(theNextSp, 
"code"); String description = terser.getSinglePrimitiveValueOrNull(theNextSp, "description"); @@ -318,30 +316,34 @@ public class SearchParameterCanonicalizer { String value = ((IBaseHasExtensions) theNextSp).getExtension() .stream() .filter(e -> HapiExtensions.EXT_SP_UNIQUE.equals(e.getUrl())) - .filter(t->t.getValue() instanceof IPrimitiveType) - .map(t->(IPrimitiveType)t.getValue()) - .map(t->t.getValueAsString()) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(t -> t.getValueAsString()) .findFirst() .orElse(""); if ("true".equalsIgnoreCase(value)) { unique = true; } - List components = new ArrayList<>(); + List components = new ArrayList<>(); for (IBase next : terser.getValues(theNextSp, "component")) { String expression = terser.getSinglePrimitiveValueOrNull(next, "expression"); String definition = terser.getSinglePrimitiveValueOrNull(next, "definition"); - components.add(new JpaRuntimeSearchParam.Component(expression, new Reference(definition))); + if (startsWith(definition, "/SearchParameter/")) { + definition = definition.substring(1); + } + + components.add(new RuntimeSearchParam.Component(expression, definition)); } - return new JpaRuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, base); + return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, base); } /** * Extracts any extensions from the resource and populates an extension field in the */ - protected void extractExtensions(IBaseResource theSearchParamResource, JpaRuntimeSearchParam theRuntimeSearchParam) { + protected void extractExtensions(IBaseResource theSearchParamResource, RuntimeSearchParam theRuntimeSearchParam) { if (theSearchParamResource instanceof IBaseHasExtensions) { List> extensions = ((IBaseHasExtensions) theSearchParamResource).getExtension(); for 
(IBaseExtension next : extensions) { @@ -356,7 +358,7 @@ public class SearchParameterCanonicalizer { } } - private void setEncoder(JpaRuntimeSearchParam theRuntimeSearchParam, IBaseDatatype theValue) { + private void setEncoder(RuntimeSearchParam theRuntimeSearchParam, IBaseDatatype theValue) { if (theValue instanceof IPrimitiveType) { String stringValue = ((IPrimitiveType) theValue).getValueAsString(); PhoneticEncoderEnum encoderEnum = EnumUtils.getEnum(PhoneticEncoderEnum.class, stringValue); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java new file mode 100644 index 00000000000..2f64222d586 --- /dev/null +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java @@ -0,0 +1,194 @@ +package ca.uhn.fhir.jpa.searchparam.util; + +/*- + * #%L + * HAPI FHIR Search Parameters + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.model.api.IQueryParameterAnd; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.rest.api.QualifiedParamList; +import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; +import ca.uhn.fhir.rest.param.CompositeAndListParam; +import ca.uhn.fhir.rest.param.DateAndListParam; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.HasAndListParam; +import ca.uhn.fhir.rest.param.HasParam; +import ca.uhn.fhir.rest.param.NumberAndListParam; +import ca.uhn.fhir.rest.param.NumberParam; +import ca.uhn.fhir.rest.param.QuantityAndListParam; +import ca.uhn.fhir.rest.param.QuantityParam; +import ca.uhn.fhir.rest.param.ReferenceAndListParam; +import ca.uhn.fhir.rest.param.ReferenceParam; +import ca.uhn.fhir.rest.param.SpecialAndListParam; +import ca.uhn.fhir.rest.param.SpecialParam; +import ca.uhn.fhir.rest.param.StringAndListParam; +import ca.uhn.fhir.rest.param.StringParam; +import ca.uhn.fhir.rest.param.TokenAndListParam; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.param.UriAndListParam; +import ca.uhn.fhir.rest.param.UriParam; +import ca.uhn.fhir.rest.param.binder.QueryParameterAndBinder; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +public enum JpaParamUtil { + + ; + + /** + * This is a utility method intended provided to help the JPA module. 
+ */ + public static IQueryParameterAnd parseQueryParams(FhirContext theContext, RestSearchParameterTypeEnum paramType, + String theUnqualifiedParamName, List theParameters) { + QueryParameterAndBinder binder; + switch (paramType) { + case COMPOSITE: + throw new UnsupportedOperationException(); + case DATE: + binder = new QueryParameterAndBinder(DateAndListParam.class, + Collections.emptyList()); + break; + case NUMBER: + binder = new QueryParameterAndBinder(NumberAndListParam.class, + Collections.emptyList()); + break; + case QUANTITY: + binder = new QueryParameterAndBinder(QuantityAndListParam.class, + Collections.emptyList()); + break; + case REFERENCE: + binder = new QueryParameterAndBinder(ReferenceAndListParam.class, + Collections.emptyList()); + break; + case STRING: + binder = new QueryParameterAndBinder(StringAndListParam.class, + Collections.emptyList()); + break; + case TOKEN: + binder = new QueryParameterAndBinder(TokenAndListParam.class, + Collections.emptyList()); + break; + case URI: + binder = new QueryParameterAndBinder(UriAndListParam.class, + Collections.emptyList()); + break; + case HAS: + binder = new QueryParameterAndBinder(HasAndListParam.class, + Collections.emptyList()); + break; + case SPECIAL: + binder = new QueryParameterAndBinder(SpecialAndListParam.class, + Collections.emptyList()); + break; + default: + throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType + " which is currently not supported."); + } + + return binder.parse(theContext, theUnqualifiedParamName, theParameters); + } + + /** + * This is a utility method intended provided to help the JPA module. 
+ */ + public static IQueryParameterAnd parseQueryParams(ISearchParamRegistry theSearchParamRegistry, FhirContext theContext, RuntimeSearchParam theParamDef, + String theUnqualifiedParamName, List theParameters) { + + RestSearchParameterTypeEnum paramType = theParamDef.getParamType(); + + if (paramType == RestSearchParameterTypeEnum.COMPOSITE) { + + List compositeList = resolveComponentParameters(theSearchParamRegistry, theParamDef); + + if (compositeList.size() != 2) { + throw new ConfigurationException("Search parameter of type " + theUnqualifiedParamName + + " must have 2 composite types declared in parameter annotation, found " + + compositeList.size()); + } + + RuntimeSearchParam left = compositeList.get(0); + RuntimeSearchParam right = compositeList.get(1); + + @SuppressWarnings({"unchecked", "rawtypes"}) + CompositeAndListParam cp = new CompositeAndListParam( + getCompositeBindingClass(left.getParamType(), left.getName()), + getCompositeBindingClass(right.getParamType(), right.getName())); + + cp.setValuesAsQueryTokens(theContext, theUnqualifiedParamName, theParameters); + + return cp; + } else { + return parseQueryParams(theContext, paramType, theUnqualifiedParamName, theParameters); + } + } + + public static List resolveComponentParameters(ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) { + List compositeList = new ArrayList<>(); + List components = theParamDef.getComponents(); + for (RuntimeSearchParam.Component next : components) { + String url = next.getReference(); + RuntimeSearchParam componentParam = theSearchParamRegistry.getActiveSearchParamByUrl(url); + if (componentParam == null) { + throw new InternalErrorException("Can not find SearchParameter: " + url); + } + compositeList.add(componentParam); + } + + compositeList.sort((Comparator.comparing(RuntimeSearchParam::getName))); + + return compositeList; + } + + private static Class getCompositeBindingClass(RestSearchParameterTypeEnum paramType, + String 
theUnqualifiedParamName) { + + switch (paramType) { + case DATE: + return DateParam.class; + case NUMBER: + return NumberParam.class; + case QUANTITY: + return QuantityParam.class; + case REFERENCE: + return ReferenceParam.class; + case STRING: + return StringParam.class; + case TOKEN: + return TokenParam.class; + case URI: + return UriParam.class; + case HAS: + return HasParam.class; + case SPECIAL: + return SpecialParam.class; + + case COMPOSITE: + default: + throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType + + " which is currently not supported."); + } + } +} diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/IndexStressTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/IndexStressTest.java index bbb7ef23b6b..f1a84f721d4 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/IndexStressTest.java +++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/IndexStressTest.java @@ -7,7 +7,7 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.StopWatch; import org.hl7.fhir.dstu3.model.Patient; import org.junit.jupiter.api.Test; diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java index 4a6106d9fea..d1f1153d1f1 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java +++ 
b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java @@ -16,11 +16,11 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParamConstants; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; import ca.uhn.fhir.jpa.searchparam.registry.ReadOnlySearchParamCache; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.StringUtil; import ca.uhn.fhir.util.TestUtil; import com.google.common.collect.Sets; @@ -34,9 +34,9 @@ import org.hl7.fhir.dstu3.model.Questionnaire; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; +import javax.annotation.Nullable; import java.text.Normalizer; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -132,11 +132,11 @@ public class SearchParamExtractorDstu3Test { SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new ModelConfig(), new PartitionSettings(), ourCtx, searchParamRegistry); extractor.start(); - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "", RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "", RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient 
resource = new Patient(); extractor.extractSearchParamStrings(resource); - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", null, RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", null, RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); extractor.extractSearchParamStrings(resource); } @@ -148,7 +148,7 @@ public class SearchParamExtractorDstu3Test { SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new ModelConfig(), new PartitionSettings(), ourCtx, searchParamRegistry); extractor.start(); - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "communication.language.coding.system | communication.language.coding.code", RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "communication.language.coding.system | communication.language.coding.code", RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); resource.getCommunicationFirstRep().getLanguage().getCodingFirstRep().setCode("blah"); Set strings = extractor.extractSearchParamStrings(resource); @@ -166,37 +166,37 @@ public class SearchParamExtractorDstu3Test { extractor.start(); { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", 
RestSearchParameterTypeEnum.STRING, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamStrings(resource); assertThat(outcome.getWarnings(), Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); } { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", RestSearchParameterTypeEnum.TOKEN, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", RestSearchParameterTypeEnum.TOKEN, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamTokens(resource); assertThat(outcome.getWarnings(), Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); } { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", RestSearchParameterTypeEnum.QUANTITY, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", RestSearchParameterTypeEnum.QUANTITY, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamQuantity(resource); assertThat(outcome.getWarnings(), Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); } { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", 
RestSearchParameterTypeEnum.DATE, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", RestSearchParameterTypeEnum.DATE, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamDates(resource); assertThat(outcome.getWarnings(), Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); } { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", RestSearchParameterTypeEnum.NUMBER, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", RestSearchParameterTypeEnum.NUMBER, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamNumber(resource); assertThat(outcome.getWarnings(), Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); } { - searchParamRegistry.addSearchParam(new RuntimeSearchParam("foo", "foo", "Patient", RestSearchParameterTypeEnum.URI, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE)); + searchParamRegistry.addSearchParam(new RuntimeSearchParam(null, null, "foo", "foo", "Patient", RestSearchParameterTypeEnum.URI, Sets.newHashSet(), Sets.newHashSet(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); Patient resource = new Patient(); ISearchParamExtractor.SearchParamSet outcome = extractor.extractSearchParamUri(resource); assertThat(outcome.getWarnings(), 
Matchers.contains("Search param foo is of unexpected datatype: class org.hl7.fhir.dstu3.model.Patient")); @@ -222,7 +222,9 @@ public class SearchParamExtractorDstu3Test { assertEquals(longitude, coord.getLongitude(), 0.0); } - private static class MySearchParamRegistry implements ISearchParamRegistry { + private static class MySearchParamRegistry implements ISearchParamRegistry, ISearchParamRegistryController { + + // TODO: JA remove unused? private final List myAddedSearchParams = new ArrayList<>(); @@ -249,7 +251,6 @@ public class SearchParamExtractorDstu3Test { return new ResourceChangeResult(); } - @Override public ReadOnlySearchParamCache getActiveSearchParams() { throw new UnsupportedOperationException(); } @@ -268,12 +269,18 @@ public class SearchParamExtractorDstu3Test { } @Override - public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { throw new UnsupportedOperationException(); } @Override - public List getActiveUniqueSearchParams(String theResourceName) { + public List getActiveUniqueSearchParams(String theResourceName) { throw new UnsupportedOperationException(); } @@ -282,16 +289,6 @@ public class SearchParamExtractorDstu3Test { // nothing } - @Override - public RuntimeSearchParam getSearchParamByName(RuntimeResourceDefinition theResourceDef, String theParamName) { - return null; - } - - @Override - public Collection getSearchParamsByResourceType(RuntimeResourceDefinition theResourceDef) { - return null; - } - @Override public void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { // nothing diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorMegaTest.java 
b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorMegaTest.java index 97185778d9a..e3a24cc540b 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorMegaTest.java +++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorMegaTest.java @@ -20,9 +20,9 @@ import ca.uhn.fhir.context.phonetic.IPhoneticEncoder; import ca.uhn.fhir.jpa.cache.ResourceChangeResult; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; import ca.uhn.fhir.jpa.searchparam.registry.ReadOnlySearchParamCache; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseEnumeration; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -32,8 +32,8 @@ import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -45,6 +45,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; // TODO JA Please fix this test. Expanding FhirContext.getResourceTypes() to cover all resource types broke this test. @Disabled public class SearchParamExtractorMegaTest { + // TODO: JA remove unused? 
private static final Logger ourLog = LoggerFactory.getLogger(SearchParamExtractorMegaTest.class); @@ -231,7 +232,7 @@ public class SearchParamExtractorMegaTest { } - private static class MySearchParamRegistry implements ISearchParamRegistry { + private static class MySearchParamRegistry implements ISearchParamRegistry, ISearchParamRegistryController { private final FhirContext myCtx; private List myAddedSearchParams = new ArrayList<>(); @@ -263,7 +264,6 @@ public class SearchParamExtractorMegaTest { return new ResourceChangeResult(); } - @Override public ReadOnlySearchParamCache getActiveSearchParams() { throw new UnsupportedOperationException(); } @@ -282,12 +282,18 @@ public class SearchParamExtractorMegaTest { } @Override - public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + public List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { throw new UnsupportedOperationException(); } @Override - public List getActiveUniqueSearchParams(String theResourceName) { + public List getActiveUniqueSearchParams(String theResourceName) { throw new UnsupportedOperationException(); } @@ -296,16 +302,6 @@ public class SearchParamExtractorMegaTest { // nothing } - @Override - public RuntimeSearchParam getSearchParamByName(RuntimeResourceDefinition theResourceDef, String theParamName) { - return null; - } - - @Override - public Collection getSearchParamsByResourceType(RuntimeResourceDefinition theResourceDef) { - return null; - } - @Override public void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { // nothing diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java index 
17c1d56f707..c048ca05390 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java +++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java @@ -7,12 +7,12 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; import ca.uhn.fhir.model.primitive.BaseDateTimeDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.param.ParamPrefixEnum; import ca.uhn.fhir.rest.param.TokenParamModifier; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.hl7.fhir.r5.model.BaseDateTimeType; import org.hl7.fhir.r5.model.CodeableConcept; import org.hl7.fhir.r5.model.DateTimeType; @@ -34,8 +34,6 @@ import java.util.Date; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; @ExtendWith(SpringExtension.class) @@ -60,16 +58,13 @@ public class InMemoryResourceMatcherR5Test { @BeforeEach public void before() { - RuntimeSearchParam dateSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.effective", RestSearchParameterTypeEnum.DATE, null, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); - when(mySearchParamRegistry.getSearchParamByName(any(), eq("date"))).thenReturn(dateSearchParam); + RuntimeSearchParam dateSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.effective", 
RestSearchParameterTypeEnum.DATE, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null); when(mySearchParamRegistry.getActiveSearchParam("Observation", "date")).thenReturn(dateSearchParam); - RuntimeSearchParam codeSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.code", RestSearchParameterTypeEnum.TOKEN, null, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); - when(mySearchParamRegistry.getSearchParamByName(any(), eq("code"))).thenReturn(codeSearchParam); + RuntimeSearchParam codeSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.code", RestSearchParameterTypeEnum.TOKEN, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null); when(mySearchParamRegistry.getActiveSearchParam("Observation", "code")).thenReturn(codeSearchParam); - RuntimeSearchParam encSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.encounter", RestSearchParameterTypeEnum.REFERENCE, null, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); - when(mySearchParamRegistry.getSearchParamByName(any(), eq("encounter"))).thenReturn(encSearchParam); + RuntimeSearchParam encSearchParam = new RuntimeSearchParam(null, null, null, null, "Observation.encounter", RestSearchParameterTypeEnum.REFERENCE, null, null, RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null); when(mySearchParamRegistry.getActiveSearchParam("Observation", "encounter")).thenReturn(encSearchParam); myObservation = new Observation(); diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImplTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImplTest.java index 0deeb0b7a9e..53cbc763578 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImplTest.java +++ 
b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImplTest.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.server.SimpleBundleProvider; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.Enumerations; @@ -115,7 +116,7 @@ public class SearchParamRegistryImplTest { } @Bean - ISearchParamRegistry searchParamRegistry() { + ISearchParamRegistry searchParamRegistry() { return new SearchParamRegistryImpl(); } diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index a0fe13f241d..e1878aacf98 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java index 9c17e84e23d..51093138df4 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java @@ -28,7 +28,7 @@ import ca.uhn.fhir.jpa.cache.IResourceChangeListenerCache; import ca.uhn.fhir.jpa.cache.IResourceChangeListenerRegistry; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import 
ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.retry.Retrier; import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionActivatingSubscriber; import ca.uhn.fhir.rest.api.server.IBundleProvider; diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/websocket/WebsocketConnectionValidatorTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/websocket/WebsocketConnectionValidatorTest.java index 5fe3911087f..6d94c5bcc07 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/websocket/WebsocketConnectionValidatorTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/websocket/WebsocketConnectionValidatorTest.java @@ -11,7 +11,7 @@ import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory; import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig; diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 786e475ab72..7f20b4b2dc5 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 0aa2c29e8f1..9314f0dbd85 
100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml @@ -164,7 +164,7 @@ ca.uhn.hapi.fhir hapi-fhir-converter - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index 5fa02d56807..e2c52f3b321 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -19,7 +19,7 @@ import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3; import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4; import ca.uhn.fhir.jpa.provider.r5.JpaSystemProviderR5; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig; import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator; import ca.uhn.fhir.rest.api.EncodingEnum; diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index d45f8d70524..71c5ec9049a 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java index 27a84c92d87..bbec9526992 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java @@ -33,11 +33,9 
@@ import ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson; import ca.uhn.fhir.mdm.rules.json.MdmRulesJson; import ca.uhn.fhir.mdm.rules.json.MdmSimilarityJson; import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.FhirTerser; -import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.Patient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -54,12 +52,12 @@ public class MdmRuleValidator implements IMdmRuleValidator { private static final Logger ourLog = LoggerFactory.getLogger(MdmRuleValidator.class); private final FhirContext myFhirContext; - private final ISearchParamRetriever mySearchParamRetriever; + private final ISearchParamRegistry mySearchParamRetriever; private final FhirTerser myTerser; private final IFhirPath myFhirPath; @Autowired - public MdmRuleValidator(FhirContext theFhirContext, ISearchParamRetriever theSearchParamRetriever) { + public MdmRuleValidator(FhirContext theFhirContext, ISearchParamRegistry theSearchParamRetriever) { myFhirContext = theFhirContext; myTerser = myFhirContext.newTerser(); if (myFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java index 4c006cd3647..2597d2ad3f1 100644 --- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java +++ b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java @@ -7,7 +7,7 @@ import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator; import ca.uhn.fhir.mdm.rules.config.MdmSettings; import ca.uhn.fhir.mdm.rules.json.MdmRulesJson; import ca.uhn.fhir.mdm.rules.svc.MdmResourceMatcherSvc; -import 
ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; @@ -18,7 +18,7 @@ import static org.mockito.Mockito.mock; @ExtendWith(MockitoExtension.class) public abstract class BaseR4Test { protected static final FhirContext ourFhirContext = FhirContext.forR4(); - protected ISearchParamRetriever mySearchParamRetriever = mock(ISearchParamRetriever.class); + protected ISearchParamRegistry mySearchParamRetriever = mock(ISearchParamRegistry.class); protected Patient buildJohn() { Patient patient = new Patient(); diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/svc/ResourceMatcherR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/svc/ResourceMatcherR4Test.java index 0b105cf1892..8a2342c9398 100644 --- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/svc/ResourceMatcherR4Test.java +++ b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/svc/ResourceMatcherR4Test.java @@ -7,15 +7,12 @@ import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson; import ca.uhn.fhir.mdm.rules.json.MdmMatcherJson; import ca.uhn.fhir.mdm.rules.json.MdmRulesJson; import ca.uhn.fhir.mdm.rules.matcher.MdmMatcherEnum; -import ca.uhn.fhir.util.StopWatch; import org.hl7.fhir.r4.model.HumanName; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/svc/EIDHelperR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/svc/EIDHelperR4Test.java index ae93b29fe00..7f7a17bfe78 100644 --- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/svc/EIDHelperR4Test.java +++ 
b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/svc/EIDHelperR4Test.java @@ -45,10 +45,7 @@ public class EIDHelperR4Test extends BaseR4Test { @BeforeEach public void before() { when(mySearchParamRetriever.getActiveSearchParam("Patient", "identifier")) - .thenReturn(new RuntimeSearchParam( - "identifier", "Description", "identifier", RestSearchParameterTypeEnum.STRING, - new HashSet<>(), new HashSet<>(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE - )); + .thenReturn(new RuntimeSearchParam(null, null, "identifier", "Description", "identifier", RestSearchParameterTypeEnum.STRING, new HashSet<>(), new HashSet<>(), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, false, null, null)); myMdmSettings = new MdmSettings(new MdmRuleValidator(ourFhirContext, mySearchParamRetriever)) { { diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 7a4d0e9dbfd..bdf8b874600 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java index c7bc35e1811..568e3624149 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java @@ -28,7 +28,7 @@ import ca.uhn.fhir.rest.server.method.BaseMethodBinding; import ca.uhn.fhir.rest.server.method.OperationMethodBinding; import ca.uhn.fhir.rest.server.method.SearchMethodBinding; import ca.uhn.fhir.rest.server.method.SearchParameter; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.VersionUtil; import org.apache.commons.lang3.StringUtils; import 
org.apache.commons.lang3.Validate; @@ -39,6 +39,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -55,7 +56,7 @@ import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isBlank; -public class RestfulServerConfiguration implements ISearchParamRetriever { +public class RestfulServerConfiguration implements ISearchParamRegistry { private static final Logger ourLog = LoggerFactory.getLogger(RestfulServerConfiguration.class); private Collection resourceBindings; @@ -384,6 +385,12 @@ public class RestfulServerConfiguration implements ISearchParamRetriever { return retVal; } + @Nullable + @Override + public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { + throw new UnsupportedOperationException(); + } + private void createRuntimeBinding(Map theMapToPopulate, SearchMethodBinding theSearchMethodBinding) { List parameters = theSearchMethodBinding @@ -425,12 +432,11 @@ public class RestfulServerConfiguration implements ISearchParamRetriever { String description = nextParamDescription; String path = null; RestSearchParameterTypeEnum type = nextParameter.getParamType(); - List compositeOf = Collections.emptyList(); Set providesMembershipInCompartments = Collections.emptySet(); Set targets = Collections.emptySet(); RuntimeSearchParam.RuntimeSearchParamStatusEnum status = RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE; Collection base = Collections.singletonList(theSearchMethodBinding.getResourceName()); - RuntimeSearchParam param = new RuntimeSearchParam(id, uri, nextParamName, description, path, type, compositeOf, providesMembershipInCompartments, targets, status, base); + RuntimeSearchParam param = new RuntimeSearchParam(id, uri, nextParamName, description, path, type, providesMembershipInCompartments, targets, status, false, null, base); 
theMapToPopulate.put(nextParamName, param); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/SearchPreferHandlingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/SearchPreferHandlingInterceptor.java index cee31ba18a5..e9f0e2757e7 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/SearchPreferHandlingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/SearchPreferHandlingInterceptor.java @@ -35,7 +35,7 @@ import ca.uhn.fhir.rest.server.RestfulServerUtils; import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.method.SearchMethodBinding; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.apache.commons.lang3.Validate; import javax.annotation.Nonnull; @@ -57,7 +57,7 @@ public class SearchPreferHandlingInterceptor { @Nonnull private PreferHandlingEnum myDefaultBehaviour; @Nullable - private ISearchParamRetriever mySearchParamRetriever; + private ISearchParamRegistry mySearchParamRegistry; /** * Constructor that uses the {@link RestfulServer} itself to determine @@ -68,12 +68,12 @@ public class SearchPreferHandlingInterceptor { } /** - * Constructor that uses a dedicated {@link ISearchParamRetriever} instance. This is mainly + * Constructor that uses a dedicated {@link ISearchParamRegistry} instance. This is mainly * intended for the JPA server. 
*/ - public SearchPreferHandlingInterceptor(ISearchParamRetriever theSearchParamRetriever) { + public SearchPreferHandlingInterceptor(ISearchParamRegistry theSearchParamRegistry) { this(); - mySearchParamRetriever = theSearchParamRetriever; + mySearchParamRegistry = theSearchParamRegistry; } @Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED) @@ -105,7 +105,7 @@ public class SearchPreferHandlingInterceptor { private void removeUnwantedParams(PreferHandlingEnum theHandling, RequestDetails theRequestDetails) { - ISearchParamRetriever searchParamRetriever = mySearchParamRetriever; + ISearchParamRegistry searchParamRetriever = mySearchParamRegistry; if (searchParamRetriever == null) { searchParamRetriever = ((RestfulServer) theRequestDetails.getServer()).createConfiguration(); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java index d64aa433ba6..59913098b86 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java @@ -26,7 +26,7 @@ import ca.uhn.fhir.rest.server.method.OperationParameter; import ca.uhn.fhir.rest.server.method.SearchMethodBinding; import ca.uhn.fhir.rest.server.method.SearchParameter; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.rest.server.util.ISearchParamRetriever; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.FhirTerser; import com.google.common.collect.TreeMultimap; import org.hl7.fhir.instance.model.api.IBase; @@ -86,7 +86,7 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv private static final Logger ourLog = LoggerFactory.getLogger(ServerCapabilityStatementProvider.class); private final FhirContext 
myContext; private final RestfulServer myServer; - private final ISearchParamRetriever mySearchParamRetriever; + private final ISearchParamRegistry mySearchParamRegistry; private final RestfulServerConfiguration myServerConfiguration; private final IValidationSupport myValidationSupport; private String myPublisher = "Not provided"; @@ -98,7 +98,7 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv public ServerCapabilityStatementProvider(RestfulServer theServer) { myServer = theServer; myContext = theServer.getFhirContext(); - mySearchParamRetriever = null; + mySearchParamRegistry = null; myServerConfiguration = null; myValidationSupport = null; } @@ -109,7 +109,7 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv public ServerCapabilityStatementProvider(FhirContext theContext, RestfulServerConfiguration theServerConfiguration) { myContext = theContext; myServerConfiguration = theServerConfiguration; - mySearchParamRetriever = null; + mySearchParamRegistry = null; myServer = null; myValidationSupport = null; } @@ -117,9 +117,9 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv /** * Constructor */ - public ServerCapabilityStatementProvider(RestfulServer theRestfulServer, ISearchParamRetriever theSearchParamRetriever, IValidationSupport theValidationSupport) { + public ServerCapabilityStatementProvider(RestfulServer theRestfulServer, ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { myContext = theRestfulServer.getFhirContext(); - mySearchParamRetriever = theSearchParamRetriever; + mySearchParamRegistry = theSearchParamRegistry; myServer = theRestfulServer; myServerConfiguration = null; myValidationSupport = theValidationSupport; @@ -349,16 +349,16 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } - ISearchParamRetriever searchParamRetriever; - if (mySearchParamRetriever != null) { - 
searchParamRetriever = mySearchParamRetriever; + ISearchParamRegistry searchParamRegistry; + if (mySearchParamRegistry != null) { + searchParamRegistry = mySearchParamRegistry; } else if (myServerConfiguration != null) { - searchParamRetriever = myServerConfiguration; + searchParamRegistry = myServerConfiguration; } else { - searchParamRetriever = myServer.createConfiguration(); + searchParamRegistry = myServer.createConfiguration(); } - Map searchParams = searchParamRetriever.getActiveSearchParams(resourceName); + Map searchParams = searchParamRegistry.getActiveSearchParams(resourceName); for (RuntimeSearchParam next : searchParams.values()) { IBase searchParam = terser.addElement(resource, "searchParam"); terser.addElement(searchParam, "name", next.getName()); @@ -412,7 +412,7 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv continue; } - for (RuntimeSearchParam t : searchParamRetriever + for (RuntimeSearchParam t : searchParamRegistry .getActiveSearchParams(nextResourceName) .values()) { if (t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java new file mode 100644 index 00000000000..b78beffb187 --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java @@ -0,0 +1,106 @@ +package ca.uhn.fhir.rest.server.util; + +/* + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.context.phonetic.IPhoneticEncoder; +import ca.uhn.fhir.rest.api.Constants; +import org.hl7.fhir.instance.model.api.IAnyResource; + +import javax.annotation.Nullable; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; + +// TODO: JA remove default methods +public interface ISearchParamRegistry { + + /** + * @return Returns {@literal null} if no match + */ + RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName); + + /** + * @return Returns all active search params for the given resource + */ + Map getActiveSearchParams(String theResourceName); + + /** + * Request that the cache be refreshed now, in the current thread + */ + default void forceRefresh() { + } + + ; + + /** + * Request that the cache be refreshed at the next convenient time (in a different thread) + */ + default void requestRefresh() { + } + + + /** + * When indexing a HumanName, if a StringEncoder is set in the context, then the "phonetic" search parameter will normalize + * the String using this encoder. 
+ * + * @since 5.1.0 + */ + default void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { + } + + default List getActiveUniqueSearchParams(String theResourceName) { + return Collections.emptyList(); + } + + default List getActiveUniqueSearchParams(String theResourceName, Set theParamNames) { + return Collections.emptyList(); + } + + /** + * Returns a collection containing all of the valid active search parameters. This method is intended for + * creating error messages for users as opposed to actual search processing. It will include meta parameters + * such as _id and _lastUpdated. + */ + default Collection getValidSearchParameterNamesIncludingMeta(String theResourceName) { + TreeSet retval; + Map searchParamMap = getActiveSearchParams(theResourceName); + if (searchParamMap == null) { + retval = new TreeSet<>(); + } else { + retval = new TreeSet<>(searchParamMap.keySet()); + } + retval.add(IAnyResource.SP_RES_ID); + retval.add(Constants.PARAM_LASTUPDATED); + return retval; + } + + /** + * Fetch a SearchParameter by URL + * + * @return Returns null if it can't be found + */ + @Nullable + RuntimeSearchParam getActiveSearchParamByUrl(String theUrl); +} diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 6508b400cd3..6a615025f8d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index fa09b887655..cea3284ea07 100644 --- 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index e7dd6a9b47e..729b752242d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index bf86d1e69ff..c70d6428ecc 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index e6ee6b3ee59..01709779112 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 5.4.0-PRE3-SNAPSHOT 
+ 5.4.0-PRE4-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 24c0ec50b3b..81895a5dc6c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 494b6d67de4..5798cb0a6f5 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index aa71145412e..da1cff255bb 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index cefcd8081c8..108cf5a0825 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index cd346a7800f..f37268d1445 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/context/FhirContextDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/context/FhirContextDstu3Test.java index 9c06874d648..ff8b4ce7cbf 
100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/context/FhirContextDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/context/FhirContextDstu3Test.java @@ -49,7 +49,7 @@ public class FhirContextDstu3Test { @Test public void testRuntimeSearchParamToString() { String val = ourCtx.getResourceDefinition("Patient").getSearchParam("gender").toString(); - assertEquals("RuntimeSearchParam[base=[Patient],name=gender,path=Patient.gender,id=,uri=]", val); + assertEquals("RuntimeSearchParam[base=[Patient],name=gender,path=Patient.gender,id=,uri=http://hl7.org/fhir/SearchParameter/patient-gender]", val); } @Test diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index e324f4b62ba..cffad70ec0e 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 213c886e3a7..fe444b0a1c9 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/test/java/org/hl7/fhir/r4/model/ModelR4Test.java b/hapi-fhir-structures-r4/src/test/java/org/hl7/fhir/r4/model/ModelR4Test.java index e1a590965ba..5ef973154bc 100644 --- a/hapi-fhir-structures-r4/src/test/java/org/hl7/fhir/r4/model/ModelR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/org/hl7/fhir/r4/model/ModelR4Test.java @@ -1,13 +1,17 @@ package org.hl7.fhir.r4.model; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeSearchParam; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; 
import static org.junit.jupiter.api.Assertions.fail; public class ModelR4Test { + private static final Logger ourLog = LoggerFactory.getLogger(ModelR4Test.class); private static FhirContext ourCtx = FhirContext.forR4(); @Test @@ -29,5 +33,11 @@ public class ModelR4Test { } } + @Test + public void testCompositeRuntimeSearchParamHasComponents() { + RuntimeSearchParam searchParam = ourCtx.getResourceDefinition("Observation").getSearchParam("code-value-concept"); + ourLog.info("Have params: {}", searchParam.getComponents().toString()); + assertEquals(2, searchParam.getComponents().size()); + } } diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index ae5f6837afb..72fd8b4b0ab 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index fd25270a41e..bbb540c8ded 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index e2b40e7befb..305b835d2d1 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index dcaff60c474..bef3d99f342 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml 
b/hapi-fhir-validation-resources-dstu2/pom.xml index 6b836174ebb..0e98d4c8ead 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index a22a07377f5..77b38d6c862 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 07b693a297e..222c7a6db50 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 04662967414..c3885abf4c3 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index dd8740147fa..1bbb77e484d 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 0c7c2bdccf7..d21005fb478 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml @@ -58,37 
+58,37 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu3 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-hl7org-dstu2 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r4 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r5 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu2 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu3 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-r4 - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT org.apache.velocity @@ -169,6 +169,7 @@ org.apache.maven maven-project + org.apache.maven maven-plugin-api - + org.apache.maven.plugin-tools maven-plugin-annotations provided - + diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java index 82a90c6dd76..c5dedf7a164 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java @@ -216,12 +216,14 @@ public class TinderJpaRestServerMojo extends AbstractMojo { TinderJpaRestServerMojo mojo = new TinderJpaRestServerMojo(); mojo.myProject = new MavenProject(); mojo.version = "dstu2"; - mojo.packageBase = "ca.uhn.test"; - mojo.configPackageBase = "ca.uhn.test"; + mojo.packageBase = "ca.uhn.fhir.jpa.rp.r4"; + mojo.configPackageBase = "ca.uhn.fhir.jpa.config"; mojo.baseResourceNames = new ArrayList(Arrays.asList( -// "observation" + "bundle", + "observation", // "communicationrequest" - "binary" + "binary", + "structuredefinition" )); mojo.targetDirectory = new File("target/generated/valuesets"); mojo.targetResourceDirectory = new File("target/generated/valuesets"); diff --git 
a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java index 6c34a5a7813..fa651198092 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java @@ -38,14 +38,14 @@ public class SearchParameter { public List getCompositeOf() { if (myCompositeOf == null) { - myCompositeOf = new ArrayList(); + myCompositeOf = new ArrayList<>(); } return myCompositeOf; } public List getCompositeTypes() { if (myCompositeTypes == null) { - myCompositeTypes = new ArrayList(); + myCompositeTypes = new ArrayList<>(); } return myCompositeTypes; } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java index 7ef2bb8a4a0..bef6ef51ba8 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java @@ -2,7 +2,9 @@ package ca.uhn.fhir.tinder.parser; import java.io.File; import java.util.*; +import java.util.stream.Collectors; +import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import org.apache.commons.lang.WordUtils; import ca.uhn.fhir.context.RuntimeResourceDefinition; @@ -67,11 +69,20 @@ public class ResourceGeneratorUsingModel extends BaseStructureParser { for (RuntimeSearchParam nextSearchParam : def.getSearchParams()) { SearchParameter param = new SearchParameter(getVersion(), def.getName()); - + + List compositeOfParams = nextSearchParam + .getComponents() + .stream() + .map(t -> def.getSearchParams().stream().filter(y -> y.getUri().equals(t.getReference())).findFirst().orElseThrow(() -> new IllegalStateException())) + .collect(Collectors.toList()); + if (nextSearchParam.getParamType() 
== RestSearchParameterTypeEnum.COMPOSITE && compositeOfParams.size() != 2) { + throw new IllegalStateException("Search param " + nextSearchParam.getName() + " on base " + nextSearchParam.getBase() + " has components: " + nextSearchParam.getComponents()); + } + param.setName(nextSearchParam.getName()); param.setDescription(nextSearchParam.getDescription()); - param.setCompositeOf(toCompositeOfStrings(nextSearchParam.getCompositeOf())); - param.setCompositeTypes(toCompositeOfTypes(nextSearchParam.getCompositeOf())); + param.setCompositeOf(toCompositeOfStrings(compositeOfParams)); + param.setCompositeTypes(toCompositeOfTypes(compositeOfParams)); param.setPath(nextSearchParam.getPath()); param.setType(nextSearchParam.getParamType().getCode()); diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index eede79deb82..9497e5e2aea 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index feb240858a6..8dfad0cf1c8 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. 
https://hapifhir.io @@ -1267,12 +1267,12 @@ org.apache.maven maven-plugin-api - 3.5.0 + 3.6.3 org.apache.maven.plugin-tools maven-plugin-annotations - 3.5 + 3.6.0 org.apache.velocity diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml index 1c31f387041..051b36516f5 100644 --- a/restful-server-example/pom.xml +++ b/restful-server-example/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../pom.xml diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 407594541ea..c6e0d2c9d18 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index ce2dcbb386f..b399e7eae87 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index bb683de985e..bb5a3413ee0 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE3-SNAPSHOT + 5.4.0-PRE4-SNAPSHOT ../../pom.xml From fcffb04c7bac3d4602049a6c22eba502e187cd3a Mon Sep 17 00:00:00 2001 From: James Agnew Date: Mon, 5 Apr 2021 20:59:59 -0400 Subject: [PATCH 23/61] Allow unknown code systems during validation (#2523) * Allow unknown code systems during validation * Add changelog * Test fix --- ...w-validation-with-unknown-code-system.yaml | 6 + .../validation/validation_support_modules.md | 9 + .../ca/uhn/fhir/jpa/config/BaseConfig.java | 
7 +- .../fhir/jpa/term/BaseTermReadSvcImpl.java | 5 - .../validation/JpaValidationSupportChain.java | 6 +- .../dao/r4/FhirResourceDaoR4ValidateTest.java | 159 ++++++++++++------ ...minologyLoaderSvcIntegrationDstu3Test.java | 4 +- ...ownCodeSystemWarningValidationSupport.java | 58 +++++++ 8 files changed, 189 insertions(+), 65 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2523-allow-validation-with-unknown-code-system.yaml create mode 100644 hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2523-allow-validation-with-unknown-code-system.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2523-allow-validation-with-unknown-code-system.yaml new file mode 100644 index 00000000000..6f0602ac2e9 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2523-allow-validation-with-unknown-code-system.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 2523 +title: "A new Validation Support Module has been added called UnknownCodeSystemWarningValidationSupport. This module + allows validation to produce a warning but not an error if a code being validated references + an unknown code system." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/validation/validation_support_modules.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/validation/validation_support_modules.md index df801b39f48..a727b73b22c 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/validation/validation_support_modules.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/validation/validation_support_modules.md @@ -134,6 +134,15 @@ This module will invoke the following operations on the remote terminology serve * **POST [base]/CodeSystem/$validate-code** – Validate codes in fields where no specific ValueSet is bound * **POST [base]/ValueSet/$validate-code** – Validate codes in fields where a specific ValueSet is bound +# UnknownCodeSystemWarningValidationSupport + +[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.html) / [Source](https://github.com/jamesagnew/hapi-fhir/blob/ja_20200218_validation_api_changes/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java) + +This validation support module may be placed at the end of a ValidationSupportChain in order to configure the validator to generate a warning if a resource being validated contains an unknown code system. + +Note that this module must also be activated by calling [setAllowNonExistentCodeSystem(true)](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.html#setAllowNonExistentCodeSystem(boolean)) in order to specify that unknown code systems should be allowed. + + # Recipes The IValidationSupport instance passed to the FhirInstanceValidator will often resemble the chain shown in the diagram below. 
In this diagram: diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index a0a29757444..942a5e90405 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -130,6 +130,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import org.hibernate.jpa.HibernatePersistenceProvider; +import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices; import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager; @@ -227,7 +228,6 @@ public abstract class BaseConfig { this.searchCoordQueueCapacity = searchCoordQueueCapacity; } - @Bean public BatchConfigurer batchConfigurer() { return new NonPersistedBatchConfigurer(); @@ -834,6 +834,11 @@ public abstract class BaseConfig { return new JpaResourceLoader(); } + @Bean + public UnknownCodeSystemWarningValidationSupport unknownCodeSystemWarningValidationSupport() { + return new UnknownCodeSystemWarningValidationSupport(fhirContext()); + } + public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) { theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer())); theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java index 077e4fc8804..87c572f29a0 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java @@ -2184,11 +2184,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { retVal = createFailureCodeValidationResult(theCodeSystem, theCode, append); } - if (retVal == null) { - String append = " - Unable to expand ValueSet[" + theValueSetUrl + "]"; - retVal = createFailureCodeValidationResult(theCodeSystem, theCode, append); - } - return retVal; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java index 508bdf3b148..a9faee6063e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java @@ -26,8 +26,9 @@ import ca.uhn.fhir.jpa.packages.NpmJpaValidationSupport; import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService; -import org.hl7.fhir.common.hapi.validation.support.SnapshotGeneratingValidationSupport; import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport; +import org.hl7.fhir.common.hapi.validation.support.SnapshotGeneratingValidationSupport; +import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -52,6 +53,8 @@ public class JpaValidationSupportChain extends ValidationSupportChain { private NpmJpaValidationSupport myNpmJpaValidationSupport; @Autowired 
private ITermConceptMappingSvc myConceptMappingSvc; + @Autowired + private UnknownCodeSystemWarningValidationSupport myUnknownCodeSystemWarningValidationSupport; public JpaValidationSupportChain(FhirContext theFhirContext) { myFhirContext = theFhirContext; @@ -77,6 +80,7 @@ public class JpaValidationSupportChain extends ValidationSupportChain { addValidationSupport(myNpmJpaValidationSupport); addValidationSupport(new CommonCodeSystemsTerminologyService(myFhirContext)); addValidationSupport(myConceptMappingSvc); + addValidationSupport(myUnknownCodeSystemWarningValidationSupport); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java index bc6e003154a..c531a58cd9f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java @@ -9,7 +9,6 @@ import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; -import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; @@ -28,6 +27,7 @@ import ca.uhn.fhir.util.OperationOutcomeUtil; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.validation.IValidatorModule; import org.apache.commons.io.IOUtils; +import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -69,12 +69,10 @@ import 
org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.util.AopTestUtils; -import org.springframework.transaction.PlatformTransactionManager; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Collections; -import java.util.Locale; import java.util.stream.Collectors; import static org.awaitility.Awaitility.await; @@ -96,18 +94,111 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { @Autowired private ITermReadSvc myTermReadSvc; @Autowired - private ITermCodeSystemStorageSvc myTermCodeSystemStorageSvcc; - @Autowired private DaoRegistry myDaoRegistry; @Autowired private JpaValidationSupportChain myJpaValidationSupportChain; @Autowired - private PlatformTransactionManager myTransactionManager; - @Autowired private ValidationSettings myValidationSettings; + @Autowired + private UnknownCodeSystemWarningValidationSupport myUnknownCodeSystemWarningValidationSupport; + @AfterEach + public void after() { + FhirInstanceValidator val = AopTestUtils.getTargetObject(myValidatorModule); + val.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning); + + myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences()); + myDaoConfig.setMaximumExpansionSize(DaoConfig.DEFAULT_MAX_EXPANSION_SIZE); + myDaoConfig.setPreExpandValueSets(new DaoConfig().isPreExpandValueSets()); + + BaseTermReadSvcImpl.setInvokeOnNextCallForUnitTest(null); + + myValidationSettings.setLocalReferenceValidationDefaultPolicy(IResourceValidator.ReferenceValidationPolicy.IGNORE); + myFhirCtx.setParserErrorHandler(new StrictErrorHandler()); + + myUnknownCodeSystemWarningValidationSupport.setAllowNonExistentCodeSystem(UnknownCodeSystemWarningValidationSupport.ALLOW_NON_EXISTENT_CODE_SYSTEM_DEFAULT); + } + + /** + * By default an unknown code system should fail vaildation + */ @Test - public void 
testValidateCodeInValueSetWithUnknownCodeSystem() { + public void testValidateCodeInValueSetWithUnknownCodeSystem_FailValidation() { + createStructureDefWithBindingToUnknownCs(); + + Observation obs = new Observation(); + obs.getMeta().addProfile("http://sd"); + obs.getText().setDivAsString("
    Hello
    "); + obs.getText().setStatus(Narrative.NarrativeStatus.GENERATED); + obs.getCategoryFirstRep().addCoding().setSystem("http://terminology.hl7.org/CodeSystem/observation-category").setCode("vital-signs"); + obs.getCode().setText("hello"); + obs.setSubject(new Reference("Patient/123")); + obs.addPerformer(new Reference("Practitioner/123")); + obs.setEffective(DateTimeType.now()); + obs.setStatus(ObservationStatus.FINAL); + + OperationOutcome oo; + + // Valid code + obs.setValue(new Quantity().setSystem("http://cs").setCode("code1").setValue(123)); + oo = validateAndReturnOutcome(obs); + String encoded = encode(oo); + ourLog.info(encoded); + assertEquals("No issues detected during validation", oo.getIssueFirstRep().getDiagnostics(), encoded); + + // Invalid code + obs.setValue(new Quantity().setSystem("http://cs").setCode("code99").setValue(123)); + oo = validateAndReturnOutcome(obs); + encoded = encode(oo); + ourLog.info(encoded); + assertEquals(1, oo.getIssue().size(), encoded); + assertEquals("The code provided (http://cs#code99) is not in the value set http://vs, and a code from this value set is required: Unknown code system: http://cs", oo.getIssueFirstRep().getDiagnostics(), encoded); + assertEquals(OperationOutcome.IssueSeverity.ERROR, oo.getIssueFirstRep().getSeverity(), encoded); + + } + + /** + * By default an unknown code system should fail vaildation + */ + @Test + public void testValidateCodeInValueSetWithUnknownCodeSystem_Warning() { + myUnknownCodeSystemWarningValidationSupport.setAllowNonExistentCodeSystem(true); + + createStructureDefWithBindingToUnknownCs(); + + Observation obs = new Observation(); + obs.getMeta().addProfile("http://sd"); + obs.getText().setDivAsString("
    Hello
    "); + obs.getText().setStatus(Narrative.NarrativeStatus.GENERATED); + obs.getCategoryFirstRep().addCoding().setSystem("http://terminology.hl7.org/CodeSystem/observation-category").setCode("vital-signs"); + obs.getCode().setText("hello"); + obs.setSubject(new Reference("Patient/123")); + obs.addPerformer(new Reference("Practitioner/123")); + obs.setEffective(DateTimeType.now()); + obs.setStatus(ObservationStatus.FINAL); + + OperationOutcome oo; + String encoded; + + // Valid code + obs.setValue(new Quantity().setSystem("http://cs").setCode("code1").setValue(123)); + oo = validateAndReturnOutcome(obs); + encoded = encode(oo); + ourLog.info(encoded); + assertEquals("No issues detected during validation", oo.getIssueFirstRep().getDiagnostics(), encoded); + + // Invalid code + obs.setValue(new Quantity().setSystem("http://cs").setCode("code99").setValue(123)); + oo = validateAndReturnOutcome(obs); + encoded = encode(oo); + ourLog.info(encoded); + assertEquals(1, oo.getIssue().size(), encoded); + assertEquals("Error Unknown code system: http://cs validating Coding", oo.getIssueFirstRep().getDiagnostics(), encoded); + assertEquals(OperationOutcome.IssueSeverity.WARNING, oo.getIssueFirstRep().getSeverity(), encoded); + + } + + public void createStructureDefWithBindingToUnknownCs() { myValidationSupport.fetchCodeSystem("http://not-exist"); // preload DefaultProfileValidationSupport ValueSet vs = new ValueSet(); @@ -132,32 +223,6 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { .setBinding(new ElementDefinition.ElementDefinitionBindingComponent().setStrength(Enumerations.BindingStrength.REQUIRED).setValueSet("http://vs")) .setId("Observation.value[x]"); myStructureDefinitionDao.create(sd); - - Observation obs = new Observation(); - obs.getMeta().addProfile("http://sd"); - obs.getText().setDivAsString("
    Hello
    "); - obs.getText().setStatus(Narrative.NarrativeStatus.GENERATED); - obs.getCategoryFirstRep().addCoding().setSystem("http://terminology.hl7.org/CodeSystem/observation-category").setCode("vital-signs"); - obs.getCode().setText("hello"); - obs.setSubject(new Reference("Patient/123")); - obs.addPerformer(new Reference("Practitioner/123")); - obs.setEffective(DateTimeType.now()); - obs.setStatus(ObservationStatus.FINAL); - - OperationOutcome oo; - - // Valid code - obs.setValue(new Quantity().setSystem("http://cs").setCode("code1").setValue(123)); - oo = validateAndReturnOutcome(obs); - ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo)); - assertEquals("No issues detected during validation", oo.getIssueFirstRep().getDiagnostics(), encode(oo)); - - // Invalid code - obs.setValue(new Quantity().setSystem("http://cs").setCode("code99").setValue(123)); - oo = validateAndReturnOutcome(obs); - ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo)); - assertEquals("The code provided (http://cs#code99) is not in the value set http://vs, and a code from this value set is required: Unknown code {http://cs}code99 - Unable to expand ValueSet[http://vs]", oo.getIssueFirstRep().getDiagnostics(), encode(oo)); - } @Test @@ -620,8 +685,6 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { } - - @Test public void testValidateValueSet() { String input = "{\n" + @@ -692,7 +755,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { assertThat(ooString, containsString("Unknown code in fragment CodeSystem 'http://example.com/codesystem#foo'")); - assertThat(oo.getIssue().stream().map(t->t.getSeverity().toCode()).collect(Collectors.toList()), contains("warning", "warning")); + assertThat(oo.getIssue().stream().map(t -> t.getSeverity().toCode()).collect(Collectors.toList()), contains("warning", "warning")); } @@ -1080,8 +1143,8 @@ public class FhirResourceDaoR4ValidateTest extends 
BaseJpaR4Test { } - private String encode(IBaseResource thePatient) { - return myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(thePatient); + private String encode(IBaseResource theResource) { + return myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(theResource); } @@ -1245,21 +1308,6 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { } } - @AfterEach - public void after() { - FhirInstanceValidator val = AopTestUtils.getTargetObject(myValidatorModule); - val.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning); - - myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences()); - myDaoConfig.setMaximumExpansionSize(DaoConfig.DEFAULT_MAX_EXPANSION_SIZE); - myDaoConfig.setPreExpandValueSets(new DaoConfig().isPreExpandValueSets()); - - BaseTermReadSvcImpl.setInvokeOnNextCallForUnitTest(null); - - myValidationSettings.setLocalReferenceValidationDefaultPolicy(IResourceValidator.ReferenceValidationPolicy.IGNORE); - myFhirCtx.setParserErrorHandler(new StrictErrorHandler()); - } - @Test public void testValidateCapabilityStatement() { @@ -1277,7 +1325,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { cs.getText().setStatus(Narrative.NarrativeStatus.GENERATED).getDiv().setValue("
    aaaa
    "); CapabilityStatement.CapabilityStatementRestComponent rest = cs.addRest(); CapabilityStatement.CapabilityStatementRestResourceComponent patient = rest.addResource(); - patient .setType("Patient"); + patient.setType("Patient"); patient.addSearchParam().setName("foo").setType(Enumerations.SearchParamType.DATE).setDefinition("http://example.com/name"); @@ -1676,5 +1724,4 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { } - } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java index b228a31cfa6..17a7141753b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java @@ -34,6 +34,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test { @@ -245,8 +246,7 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test { IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(new UriType("http://loinc.org/vs"), null, new StringType("10013-1-9999999999"), new StringType(ITermLoaderSvc.LOINC_URI), null, null, null, mySrd); - assertFalse(result.isOk()); - assertEquals("Unknown code {http://loinc.org}10013-1-9999999999 - Unable to expand ValueSet[http://loinc.org/vs]", result.getMessage()); + assertNull(result); } private Set toExpandedCodes(ValueSet theExpanded) { diff --git 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java new file mode 100644 index 00000000000..1c87167031e --- /dev/null +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java @@ -0,0 +1,58 @@ +package org.hl7.fhir.common.hapi.validation.support; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.support.ConceptValidationOptions; +import ca.uhn.fhir.context.support.ValidationSupportContext; +import org.hl7.fhir.exceptions.TerminologyServiceException; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * This validation support module may be placed at the end of a {@link ValidationSupportChain} + * in order to configure the validator to generate a warning if a resource being validated + * contains an unknown code system. + * + * Note that this module must also be activated by calling {@link #setAllowNonExistentCodeSystem(boolean)} + * in order to specify that unknown code systems should be allowed. 
+ */ +public class UnknownCodeSystemWarningValidationSupport extends BaseValidationSupport { + public static final boolean ALLOW_NON_EXISTENT_CODE_SYSTEM_DEFAULT = false; + + private boolean myAllowNonExistentCodeSystem = ALLOW_NON_EXISTENT_CODE_SYSTEM_DEFAULT; + + /** + * Constructor + */ + public UnknownCodeSystemWarningValidationSupport(FhirContext theFhirContext) { + super(theFhirContext); + } + + @Override + public boolean isValueSetSupported(ValidationSupportContext theValidationSupportContext, String theValueSetUrl) { + return true; + } + + @Nullable + @Override + public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + IBaseResource codeSystem = theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystem); + if (codeSystem != null) { + return null; + } + + String message = "Unknown code system: " + theCodeSystem; + if (!myAllowNonExistentCodeSystem) { + return new CodeValidationResult() + .setSeverity(IssueSeverity.ERROR) + .setMessage(message); + } + + throw new TerminologyServiceException(message); + } + + public void setAllowNonExistentCodeSystem(boolean theAllowNonExistentCodeSystem) { + myAllowNonExistentCodeSystem = theAllowNonExistentCodeSystem; + } +} From a6cbf7eebdbccc12eed369b0c84d43310f582303 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Tue, 6 Apr 2021 17:36:54 -0400 Subject: [PATCH 24/61] Add support for hierarchycal valueset expansion (#2525) * Add support for hierarchycal valueset expansion * Add changelog * Add tests * Cleanup * Test fix * Test fixes --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../support/ValueSetExpansionOptions.java | 19 ++ hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- 
hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 8 +- .../5_4_0/2525-hierarchical-expansion.yaml | 6 + hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jaxrsserver-example/pom.xml | 2 +- hapi-fhir-jpaserver-api/pom.xml | 2 +- .../jpa/api/dao/IFhirResourceDaoValueSet.java | 13 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../jpa/dao/FhirResourceDaoValueSetDstu2.java | 59 +++--- .../dstu3/FhirResourceDaoValueSetDstu3.java | 33 +-- .../jpa/dao/r4/FhirResourceDaoValueSetR4.java | 47 +--- .../jpa/dao/r5/FhirResourceDaoValueSetR5.java | 33 +-- .../jpa/entity/ITermValueSetConceptView.java | 47 ++++ .../ca/uhn/fhir/jpa/entity/TermConcept.java | 2 +- .../fhir/jpa/entity/TermValueSetConcept.java | 49 ++++- .../jpa/entity/TermValueSetConceptView.java | 77 +++++-- .../entity/TermValueSetConceptViewOracle.java | 75 +++++-- .../BaseJpaResourceProviderValueSetDstu2.java | 8 +- .../BaseJpaResourceProviderValueSetDstu3.java | 72 ++----- .../r4/BaseJpaResourceProviderValueSetR4.java | 110 +++++----- .../r5/BaseJpaResourceProviderValueSetR5.java | 73 ++----- .../fhir/jpa/term/BaseTermReadSvcImpl.java | 200 +++++------------- .../jpa/term/IValueSetConceptAccumulator.java | 8 +- .../jpa/term/ValueSetConceptAccumulator.java | 14 +- ...ansionComponentWithConceptAccumulator.java | 65 ++++-- .../uhn/fhir/jpa/term/api/ITermReadSvc.java | 4 +- .../FhirResourceDaoValueSetDstu2Test.java | 13 +- .../FhirResourceDaoDstu3TerminologyTest.java | 149 +++++++------ .../FhirResourceDaoDstu3ValueSetTest.java | 9 +- ...esourceDaoR4SearchWithElasticSearchIT.java | 7 +- .../r4/FhirResourceDaoR4TerminologyTest.java | 15 +- .../dao/r4/FhirResourceDaoR4ValidateTest.java | 6 +- .../dao/r4/FhirResourceDaoR4ValueSetTest.java | 3 +- .../dao/r5/FhirResourceDaoR5ValueSetTest.java | 3 +- 
.../ResourceProviderDstu2ValueSetTest.java | 98 +++++---- ...rceProviderR4ValueSetNoVerCSNoVerTest.java | 166 ++++++++++++++- .../term/ValueSetConceptAccumulatorTest.java | 2 +- .../ValueSetExpansionR4ElasticsearchIT.java | 2 +- .../jpa/term/ValueSetExpansionR4Test.java | 16 +- hapi-fhir-jpaserver-batch/pom.xml | 2 +- hapi-fhir-jpaserver-cql/pom.xml | 6 +- hapi-fhir-jpaserver-mdm/pom.xml | 6 +- hapi-fhir-jpaserver-migrate/pom.xml | 2 +- .../tasks/HapiFhirJpaMigrationTasks.java | 4 + hapi-fhir-jpaserver-model/pom.xml | 2 +- .../uhn/fhir/jpa/model/util/JpaConstants.java | 5 + hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 4 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 16 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 2 +- restful-server-example/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 93 files changed, 930 insertions(+), 728 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2525-hierarchical-expansion.yaml 
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ITermValueSetConceptView.java diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 24b70220c38..0bee4719670 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 7799cb47af2..9eb400b46c0 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index c5a761ca696..ac1fd654c29 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java index 443874491f6..0c5edc115f6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java @@ -32,6 +32,17 @@ public class ValueSetExpansionOptions { private boolean myFailOnMissingCodeSystem = true; private int myCount = 1000; private int myOffset = 0; + private boolean myIncludeHierarchy; + private String myFilter; + + public String getFilter() { + return myFilter; + } + + public ValueSetExpansionOptions setFilter(String theFilter) { + myFilter = theFilter; + return this; + } /** * The number of codes to return. 
@@ -94,6 +105,14 @@ public class ValueSetExpansionOptions { return this; } + public boolean isIncludeHierarchy() { + return myIncludeHierarchy; + } + + public void setIncludeHierarchy(boolean theIncludeHierarchy) { + myIncludeHierarchy = theIncludeHierarchy; + } + public static ValueSetExpansionOptions forOffsetAndCount(int theOffset, int theCount) { return new ValueSetExpansionOptions() .setOffset(theOffset) diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 4dd2fffc8f8..6d6a435eddf 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -3,14 +3,14 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT pom HAPI FHIR BOM ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 0f7b6eb19c0..b9a5e93b0f6 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index dd903ab4f1b..d7306fba4a9 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 39c3cc8e20d..4d1fd59e786 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index 9783866a885..ac83c296578 100644 --- a/hapi-fhir-cli/pom.xml +++ 
b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 6893093762d..78ec60b5d68 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 6f0efaa5903..0783c0ea778 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 20d250d8cc3..ce5eca39f05 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 6b0955b04ab..837b2992aee 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index fb13d1edbb7..58d067d2797 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -78,13 +78,13 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu2 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT compile ca.uhn.hapi.fhir hapi-fhir-jpaserver-subscription - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT compile @@ -101,7 +101,7 @@ ca.uhn.hapi.fhir hapi-fhir-testpage-overlay - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT classes diff --git 
a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2525-hierarchical-expansion.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2525-hierarchical-expansion.yaml new file mode 100644 index 00000000000..18f3694db46 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2525-hierarchical-expansion.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 2525 +title: "A new optional parameter has been added to the `ValueSet/$expand` operation. When provided a value of `true`, the + operation will include the concept hierarchy in the expansion response." + diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index 6090e7545d9..bddf4b9e81b 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 6cfa369bfdc..4d6434eab74 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml index 687d6b10dbf..627009d073a 100644 --- a/hapi-fhir-jaxrsserver-example/pom.xml +++ b/hapi-fhir-jaxrsserver-example/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index 8cf5d00b229..a1b87034a93 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java 
b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java index 142158308b0..c0765bf3a46 100644 --- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.api.dao; */ import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -28,17 +29,11 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; public interface IFhirResourceDaoValueSet extends IFhirResourceDao { - T expand(IIdType theId, String theFilter, RequestDetails theRequestDetails); + T expand(IIdType theId, ValueSetExpansionOptions theOptions, RequestDetails theRequestDetails); - T expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails); + T expand(T theSource, ValueSetExpansionOptions theOptions); - T expand(T theSource, String theFilter); - - T expand(T theSource, String theFilter, int theOffset, int theCount); - - T expandByIdentifier(String theUri, String theFilter); - - T expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount); + T expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions); void purgeCaches(); diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index a8958f299a0..7b0116aad18 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java index e2102790864..25c3cf98bcb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java @@ -24,9 +24,9 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; @@ -40,6 +40,7 @@ import ca.uhn.fhir.model.primitive.DateTimeDt; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; @@ -51,20 +52,20 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import javax.annotation.Nonnull; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; -import static ca.uhn.fhir.jpa.dao.FhirResourceDaoValueSetDstu2.toStringOrNull; import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoValueSetDstu3.vsValidateCodeOptions; import static 
ca.uhn.fhir.jpa.util.LogicUtil.multiXor; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirResourceDaoValueSetDstu2 extends BaseHapiFhirResourceDao - implements IFhirResourceDaoValueSet, IFhirResourceDaoCodeSystem { + implements IFhirResourceDaoValueSet, IFhirResourceDaoCodeSystem { private DefaultProfileValidationSupport myDefaultProfileValidationSupport; @@ -104,31 +105,32 @@ public class FhirResourceDaoValueSetDstu2 extends BaseHapiFhirResourceDao theCode, IPrimitiveType theSystem, CodingDt theCoding, RequestDetails theRequest) { boolean haveCoding = theCoding != null && isNotBlank(theCoding.getSystem()) && isNotBlank(theCoding.getCode()); @@ -294,20 +287,20 @@ public class FhirResourceDaoValueSetDstu2 extends BaseHapiFhirResourceDao thePrimitive) { - return thePrimitive != null ? thePrimitive.getValue() : null; - } - @Override public IValidationSupport.CodeValidationResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, - IPrimitiveType theSystem, IPrimitiveType theDisplay, CodingDt theCoding, CodeableConceptDt theCodeableConcept, RequestDetails theRequest) { + IPrimitiveType theSystem, IPrimitiveType theDisplay, CodingDt theCoding, CodeableConceptDt theCodeableConcept, RequestDetails theRequest) { return myTerminologySvc.validateCode(vsValidateCodeOptions(), theId, toStringOrNull(theValueSetIdentifier), toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept); } @Override - public CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType theCodeSystemUrl, IPrimitiveType theVersion, IPrimitiveType theCode, - IPrimitiveType theDisplay, CodingDt theCoding, CodeableConceptDt theCodeableConcept, RequestDetails theRequestDetails) { + public CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType theCodeSystemUrl, IPrimitiveType theVersion, 
IPrimitiveType theCode, + IPrimitiveType theDisplay, CodingDt theCoding, CodeableConceptDt theCodeableConcept, RequestDetails theRequestDetails) { throw new UnsupportedOperationException(); } + public static String toStringOrNull(IPrimitiveType thePrimitive) { + return thePrimitive != null ? thePrimitive.getValue() : null; + } + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index 6145e033b33..968ba695414 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -47,42 +47,21 @@ import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet; public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao implements IFhirResourceDaoValueSet { @Override - public org.hl7.fhir.dstu3.model.ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequestDetails) { + public org.hl7.fhir.dstu3.model.ValueSet expand(IIdType theId, ValueSetExpansionOptions theOptions, RequestDetails theRequestDetails) { org.hl7.fhir.dstu3.model.ValueSet source = read(theId, theRequestDetails); - return expand(source, theFilter); + return expand(source, theOptions); } @Override - public org.hl7.fhir.dstu3.model.ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { - org.hl7.fhir.dstu3.model.ValueSet source = read(theId, theRequestDetails); - return expand(source, theFilter, theOffset, theCount); - } - - @Override - public org.hl7.fhir.dstu3.model.ValueSet expandByIdentifier(String theUri, String theFilter) { - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(null, theUri, theFilter); - return ValueSet30_40.convertValueSet(canonicalOutput); - } 
- - @Override - public org.hl7.fhir.dstu3.model.ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(options, theUri, theFilter); - return ValueSet30_40.convertValueSet(canonicalOutput); - } - - @Override - public org.hl7.fhir.dstu3.model.ValueSet expand(org.hl7.fhir.dstu3.model.ValueSet theSource, String theFilter) { + public org.hl7.fhir.dstu3.model.ValueSet expand(org.hl7.fhir.dstu3.model.ValueSet theSource, ValueSetExpansionOptions theOptions) { org.hl7.fhir.r4.model.ValueSet canonicalInput = ValueSet30_40.convertValueSet(theSource); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(null, canonicalInput, theFilter); + org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(theOptions, canonicalInput); return ValueSet30_40.convertValueSet(canonicalOutput); } @Override - public org.hl7.fhir.dstu3.model.ValueSet expand(org.hl7.fhir.dstu3.model.ValueSet theSource, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - org.hl7.fhir.r4.model.ValueSet canonicalInput = ValueSet30_40.convertValueSet(theSource); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(options, canonicalInput, theFilter); + public org.hl7.fhir.dstu3.model.ValueSet expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions) { + org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(theOptions, theUri); return ValueSet30_40.convertValueSet(canonicalOutput); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java 
index 8bc50dd690c..0c01b644d52 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -44,37 +44,20 @@ import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoValueSetDstu3.vsValidateC public class FhirResourceDaoValueSetR4 extends BaseHapiFhirResourceDao implements IFhirResourceDaoValueSet { @Override - public ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequestDetails) { + public ValueSet expand(IIdType theId, ValueSetExpansionOptions theOptions, RequestDetails theRequestDetails) { ValueSet source = read(theId, theRequestDetails); - return expand(source, theFilter); + return expand(source, theOptions); + } + + + @Override + public ValueSet expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions) { + return myTerminologySvc.expandValueSet(theOptions, theUri); } @Override - public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { - ValueSet source = read(theId, theRequestDetails); - return expand(source, theFilter, theOffset, theCount); - } - - @Override - public ValueSet expandByIdentifier(String theUri, String theFilter) { - return myTerminologySvc.expandValueSet(null, theUri, theFilter); - } - - @Override - public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - return myTerminologySvc.expandValueSet(options, theUri, theFilter); - } - - @Override - public ValueSet expand(ValueSet theSource, String theFilter) { - return myTerminologySvc.expandValueSet(null, theSource, theFilter); - } - - @Override - public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = 
ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - return myTerminologySvc.expandValueSet(options, theSource, theFilter); + public ValueSet expand(ValueSet theSource, ValueSetExpansionOptions theOptions) { + return myTerminologySvc.expandValueSet(theOptions, theSource); } @Override @@ -107,15 +90,5 @@ public class FhirResourceDaoValueSetR4 extends BaseHapiFhirResourceDao return retVal; } - public static void validateHaveExpansionOrThrowInternalErrorException(IValidationSupport.ValueSetExpansionOutcome theRetVal) { - if (theRetVal != null && theRetVal.getValueSet() == null) { - throw new InternalErrorException("Unable to expand ValueSet: " + theRetVal.getError()); - } - - if (theRetVal == null) { - throw new InternalErrorException("Unable to expand ValueSet"); - } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index 13009e60a64..b3cc6c1f331 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -44,42 +44,21 @@ import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoValueSetDstu3.vsValidateC public class FhirResourceDaoValueSetR5 extends BaseHapiFhirResourceDao implements IFhirResourceDaoValueSet { @Override - public ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequestDetails) { + public ValueSet expand(IIdType theId, ValueSetExpansionOptions theOptions, RequestDetails theRequestDetails) { ValueSet source = read(theId, theRequestDetails); - return expand(source, theFilter); + return expand(source, theOptions); } @Override - public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { - ValueSet source = read(theId, theRequestDetails); - return expand(source, 
theFilter, theOffset, theCount); - } - - @Override - public ValueSet expandByIdentifier(String theUri, String theFilter) { - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(null, theUri, theFilter); + public ValueSet expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions) { + org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(theOptions, theUri); return ValueSet40_50.convertValueSet(canonicalOutput); } @Override - public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(options, theUri, theFilter); - return ValueSet40_50.convertValueSet(canonicalOutput); - } - - @Override - public ValueSet expand(ValueSet theSource, String theFilter) { + public ValueSet expand(ValueSet theSource, ValueSetExpansionOptions theOptions) { org.hl7.fhir.r4.model.ValueSet canonicalInput = ValueSet40_50.convertValueSet(theSource); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(null, canonicalInput, theFilter); - return ValueSet40_50.convertValueSet(canonicalOutput); - } - - @Override - public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { - ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(theOffset, theCount); - org.hl7.fhir.r4.model.ValueSet canonicalInput = ValueSet40_50.convertValueSet(theSource); - org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(options, canonicalInput, theFilter); + org.hl7.fhir.r4.model.ValueSet canonicalOutput = myTerminologySvc.expandValueSet(theOptions, canonicalInput); return ValueSet40_50.convertValueSet(canonicalOutput); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ITermValueSetConceptView.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ITermValueSetConceptView.java new file mode 100644 index 00000000000..b8c593f18a8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ITermValueSetConceptView.java @@ -0,0 +1,47 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public interface ITermValueSetConceptView { + String getConceptSystemUrl(); + + String getConceptCode(); + + String getConceptDisplay(); + + Long getSourceConceptPid(); + + String getSourceConceptDirectParentPids(); + + Long getConceptPid(); + + Long getDesignationPid(); + + String getDesignationUseSystem(); + + String getDesignationUseCode(); + + String getDesignationUseDisplay(); + + String getDesignationVal(); + + String getDesignationLang(); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index aafe8449e2d..522df067346 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -388,7 +388,7 @@ public class TermConcept implements Serializable { b.append("NONE"); } - myParentPids = b.toString(); + setParentPids(b.toString()); } public TermConcept 
setParentPids(String theParentPids) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java index 2f1a82e10cd..b62120cc213 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java @@ -27,7 +27,21 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import javax.annotation.Nonnull; -import javax.persistence.*; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.ForeignKey; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.OneToMany; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Transient; +import javax.persistence.UniqueConstraint; import java.io.Serializable; import java.util.ArrayList; import java.util.List; @@ -60,6 +74,9 @@ public class TermValueSetConcept implements Serializable { @Column(name = "VALUESET_PID", insertable = false, updatable = false, nullable = false) private Long myValueSetPid; + @Column(name = "INDEX_STATUS", nullable = true) + private Long myIndexStatus; + @Column(name = "VALUESET_ORDER", nullable = false) private int myOrder; @@ -69,6 +86,13 @@ public class TermValueSetConcept implements Serializable { @Transient private String myValueSetName; + @Column(name = "SOURCE_PID", nullable = true) + private Long mySourceConceptPid; + + @Lob + @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true) + private String mySourceConceptDirectParentPids; + @Column(name = "SYSTEM_URL", nullable = 
false, length = TermCodeSystem.MAX_URL_LENGTH) private String mySystem; @@ -87,6 +111,13 @@ public class TermValueSetConcept implements Serializable { @Transient private transient Integer myHashCode; + /** + * Constructor + */ + public TermValueSetConcept() { + super(); + } + public Long getId() { return myId; } @@ -219,4 +250,20 @@ public class TermValueSetConcept implements Serializable { .append(myDesignations != null ? ("myDesignations - size=" + myDesignations.size()) : ("myDesignations=(null)")) .toString(); } + + public Long getIndexStatus() { + return myIndexStatus; + } + + public void setIndexStatus(Long theIndexStatus) { + myIndexStatus = theIndexStatus; + } + + public void setSourceConceptPid(Long theSourceConceptPid) { + mySourceConceptPid = theSourceConceptPid; + } + + public void setSourceConceptDirectParentPids(String theSourceConceptDirectParentPids) { + mySourceConceptDirectParentPids = theSourceConceptDirectParentPids; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java index 5110ea8baee..22fe57454c0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java @@ -20,16 +20,20 @@ package ca.uhn.fhir.jpa.entity; * #L% */ +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.apache.commons.io.IOUtils; import org.hibernate.annotations.Immutable; import org.hibernate.annotations.Subselect; import javax.persistence.Column; -import javax.persistence.EmbeddedId; import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; import javax.persistence.Id; +import javax.persistence.Lob; +import java.io.IOException; +import java.io.Reader; import java.io.Serializable; +import java.sql.Clob; 
+import java.sql.SQLException; @Entity @Immutable @@ -39,26 +43,28 @@ import java.io.Serializable; * because hibernate won't allow the view the function without it, but */ "SELECT CONCAT_WS(' ', vsc.PID, vscd.PID) AS PID, " + - " vsc.PID AS CONCEPT_PID, " + - " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + - " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + - " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + - " vsc.CODEVAL AS CONCEPT_CODEVAL, " + - " vsc.DISPLAY AS CONCEPT_DISPLAY, " + - " vscd.PID AS DESIGNATION_PID, " + - " vscd.LANG AS DESIGNATION_LANG, " + - " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + - " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + - " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + - " vscd.VAL AS DESIGNATION_VAL " + - "FROM TRM_VALUESET_CONCEPT vsc " + - "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID" + " vsc.PID AS CONCEPT_PID, " + + " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + + " vsc.CODEVAL AS CONCEPT_CODEVAL, " + + " vsc.DISPLAY AS CONCEPT_DISPLAY, " + + " vsc.SOURCE_PID AS SOURCE_PID, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + + " vscd.PID AS DESIGNATION_PID, " + + " vscd.LANG AS DESIGNATION_LANG, " + + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + + " vscd.VAL AS DESIGNATION_VAL " + + "FROM TRM_VALUESET_CONCEPT vsc " + + "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID" ) -public class TermValueSetConceptView implements Serializable { +public class TermValueSetConceptView implements Serializable, ITermValueSetConceptView { private static final long serialVersionUID = 1L; @Id - @Column(name="PID", length = 1000 /* length only needed to satisfy JpaEntityTest, it's not used*/) + @Column(name = "PID", length = 1000 /* length 
only needed to satisfy JpaEntityTest, it's not used*/) private String id; // still set automatically @Column(name = "CONCEPT_PID") @@ -97,43 +103,76 @@ public class TermValueSetConceptView implements Serializable { @Column(name = "DESIGNATION_VAL", length = TermConceptDesignation.MAX_VAL_LENGTH) private String myDesignationVal; + @Column(name = "SOURCE_PID", nullable = true) + private Long mySourceConceptPid; + @Lob + @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true) + private Clob mySourceConceptDirectParentPids; + + @Override + public Long getSourceConceptPid() { + return mySourceConceptPid; + } + + @Override + public String getSourceConceptDirectParentPids() { + if (mySourceConceptDirectParentPids != null) { + try (Reader characterStream = mySourceConceptDirectParentPids.getCharacterStream()) { + return IOUtils.toString(characterStream); + } catch (IOException | SQLException e) { + throw new InternalErrorException(e); + } + } + return null; + } + + @Override public Long getConceptPid() { return myConceptPid; } + @Override public String getConceptSystemUrl() { return myConceptSystemUrl; } + @Override public String getConceptCode() { return myConceptCode; } + @Override public String getConceptDisplay() { return myConceptDisplay; } + @Override public Long getDesignationPid() { return myDesignationPid; } + @Override public String getDesignationLang() { return myDesignationLang; } + @Override public String getDesignationUseSystem() { return myDesignationUseSystem; } + @Override public String getDesignationUseCode() { return myDesignationUseCode; } + @Override public String getDesignationUseDisplay() { return myDesignationUseDisplay; } + @Override public String getDesignationVal() { return myDesignationVal; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java index 7f6121e50ce..df9fd196713 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java @@ -20,18 +20,20 @@ package ca.uhn.fhir.jpa.entity; * #L% */ +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.apache.commons.io.IOUtils; import org.hibernate.annotations.Immutable; import org.hibernate.annotations.Subselect; import javax.persistence.Column; -import javax.persistence.DiscriminatorValue; -import javax.persistence.EmbeddedId; import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; import javax.persistence.Id; -import javax.persistence.Table; +import javax.persistence.Lob; +import java.io.IOException; +import java.io.Reader; import java.io.Serializable; +import java.sql.Clob; +import java.sql.SQLException; @Entity @Immutable @@ -41,26 +43,28 @@ import java.io.Serializable; * because hibernate won't allow the view the function without it, but */ "SELECT CONCAT(vsc.PID, CONCAT(' ', vscd.PID)) AS PID, " + - " vsc.PID AS CONCEPT_PID, " + - " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + - " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + - " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + - " vsc.CODEVAL AS CONCEPT_CODEVAL, " + - " vsc.DISPLAY AS CONCEPT_DISPLAY, " + - " vscd.PID AS DESIGNATION_PID, " + - " vscd.LANG AS DESIGNATION_LANG, " + - " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + - " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + - " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + - " vscd.VAL AS DESIGNATION_VAL " + + " vsc.PID AS CONCEPT_PID, " + + " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + + " vsc.CODEVAL AS CONCEPT_CODEVAL, " + + " vsc.DISPLAY AS CONCEPT_DISPLAY, " + + " vsc.SOURCE_PID AS SOURCE_PID, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS AS 
SOURCE_DIRECT_PARENT_PIDS, " + + " vscd.PID AS DESIGNATION_PID, " + + " vscd.LANG AS DESIGNATION_LANG, " + + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + + " vscd.VAL AS DESIGNATION_VAL " + "FROM TRM_VALUESET_CONCEPT vsc " + "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID" ) -public class TermValueSetConceptViewOracle implements Serializable { +public class TermValueSetConceptViewOracle implements Serializable, ITermValueSetConceptView { private static final long serialVersionUID = 1L; @Id - @Column(name="PID", length = 1000 /* length only needed to satisfy JpaEntityTest, it's not used*/) + @Column(name = "PID", length = 1000 /* length only needed to satisfy JpaEntityTest, it's not used*/) private String id; // still set automatically @Column(name = "CONCEPT_PID") @@ -99,43 +103,76 @@ public class TermValueSetConceptViewOracle implements Serializable { @Column(name = "DESIGNATION_VAL", length = TermConceptDesignation.MAX_VAL_LENGTH) private String myDesignationVal; + @Column(name = "SOURCE_PID", nullable = true) + private Long mySourceConceptPid; + @Lob + @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true) + private Clob mySourceConceptDirectParentPids; + + @Override public Long getConceptPid() { return myConceptPid; } + @Override public String getConceptSystemUrl() { return myConceptSystemUrl; } + @Override public String getConceptCode() { return myConceptCode; } + @Override public String getConceptDisplay() { return myConceptDisplay; } + @Override + public Long getSourceConceptPid() { + return mySourceConceptPid; + } + + @Override + public String getSourceConceptDirectParentPids() { + if (mySourceConceptDirectParentPids != null) { + try (Reader characterStream = mySourceConceptDirectParentPids.getCharacterStream()) { + return IOUtils.toString(characterStream); + } catch (IOException | SQLException e) { + throw 
new InternalErrorException(e); + } + } + return null; + } + + @Override public Long getDesignationPid() { return myDesignationPid; } + @Override public String getDesignationLang() { return myDesignationLang; } + @Override public String getDesignationUseSystem() { return myDesignationUseSystem; } + @Override public String getDesignationUseCode() { return myDesignationUseCode; } + @Override public String getDesignationUseDisplay() { return myDesignationUseDisplay; } + @Override public String getDesignationVal() { return myDesignationVal; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderValueSetDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderValueSetDstu2.java index 171fad075e3..75847497200 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderValueSetDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderValueSetDstu2.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.provider; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.model.util.JpaConstants; @@ -87,8 +88,11 @@ public class BaseJpaResourceProviderValueSetDstu2 extends JpaResourceProviderDst } } - private String toFilterString(StringDt theFilter) { - return theFilter != null ? 
theFilter.getValue() : null; + private ValueSetExpansionOptions toFilterString(StringDt theFilter) { + if (theFilter != null) { + return ValueSetExpansionOptions.forOffsetAndCount(0, 1000).setFilter(theFilter.getValue()); + } + return null; } @Operation(name = JpaConstants.OPERATION_LOOKUP, idempotent = true, returnParameters = { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java index 30fd0f05174..8ef38ecf7bf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.provider.dstu3; */ import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderValueSetDstu2; @@ -29,14 +30,24 @@ import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import org.hl7.fhir.dstu3.model.*; +import org.hl7.fhir.dstu3.model.BooleanType; +import org.hl7.fhir.dstu3.model.CodeType; +import org.hl7.fhir.dstu3.model.CodeableConcept; +import org.hl7.fhir.dstu3.model.Coding; +import org.hl7.fhir.dstu3.model.IdType; +import org.hl7.fhir.dstu3.model.IntegerType; +import org.hl7.fhir.dstu3.model.Parameters; +import org.hl7.fhir.dstu3.model.StringType; +import org.hl7.fhir.dstu3.model.UriType; +import org.hl7.fhir.dstu3.model.ValueSet; +import org.hl7.fhir.instance.model.api.IPrimitiveType; import 
javax.servlet.http.HttpServletRequest; +import static ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderValueSetR4.createValueSetExpansionOptions; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDstu3 { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetDstu3.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -51,6 +62,7 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst @OperationParam(name = "filter", min = 0, max = 1) StringType theFilter, @OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset, @OperationParam(name = "count", min = 0, max = 1) IntegerType theCount, + @OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, min = 0, max = 1, typeName = "boolean") IPrimitiveType theIncludeHierarchy, RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); @@ -71,56 +83,21 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options."); } - int offset = myDaoConfig.getPreExpandValueSetsDefaultOffset(); - if (theOffset != null && theOffset.hasValue()) { - if (theOffset.getValue() >= 0) { - offset = theOffset.getValue(); - } else { - throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. 
offset: " + theOffset.getValue()); - } - } - - int count = myDaoConfig.getPreExpandValueSetsDefaultCount(); - if (theCount != null && theCount.hasValue()) { - if (theCount.getValue() >= 0) { - count = theCount.getValue(); - } else { - throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); - } - } - int countMax = myDaoConfig.getPreExpandValueSetsMaxCount(); - if (count > countMax) { - ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); - count = countMax; - } + ValueSetExpansionOptions options = createValueSetExpansionOptions(myDaoConfig, theOffset, theCount, theIncludeHierarchy, theFilter); startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (myDaoConfig.isPreExpandValueSets()) { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(url.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter), offset, count); - } else { - return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter), offset, count); - } + if (haveId) { + return dao.expand(theId, options, theRequestDetails); + } else if (haveIdentifier) { + if (haveValueSetVersion) { + return dao.expandByIdentifier(url.getValue() + "|" + theValueSetVersion.getValue(), options); } else { - return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + return dao.expandByIdentifier(url.getValue(), options); } } else { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(url.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter)); - } else { - return 
dao.expandByIdentifier(url.getValue(), toFilterString(theFilter)); - } - } else { - return dao.expand(theValueSet, toFilterString(theFilter)); - } + return dao.expand(theValueSet, options); } } finally { endRequest(theServletRequest); @@ -128,11 +105,6 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst } - private String toFilterString(StringType theFilter) { - return theFilter != null ? theFilter.getValue() : null; - } - - @Operation(name = JpaConstants.OPERATION_VALIDATE_CODE, idempotent = true, returnParameters = { @OperationParam(name = "result", type = BooleanType.class, min = 1), @OperationParam(name = "message", type = StringType.class), diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderValueSetR4.java index 1e8e7425689..94cb8a684e2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderValueSetR4.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.provider.r4; */ import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderValueSetDstu2; @@ -29,7 +31,17 @@ import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import org.hl7.fhir.r4.model.*; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.BooleanType; +import org.hl7.fhir.r4.model.CodeType; +import 
org.hl7.fhir.r4.model.CodeableConcept; +import org.hl7.fhir.r4.model.Coding; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.IntegerType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; +import org.hl7.fhir.r4.model.UriType; +import org.hl7.fhir.r4.model.ValueSet; import javax.servlet.http.HttpServletRequest; @@ -48,6 +60,7 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4 theIncludeHierarchy, RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); @@ -63,69 +76,28 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4= 0) { - offset = theOffset.getValue(); - } else { - throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); - } - } - - int count = myDaoConfig.getPreExpandValueSetsDefaultCount(); - if (theCount != null && theCount.hasValue()) { - if (theCount.getValue() >= 0) { - count = theCount.getValue(); - } else { - throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. 
count: " + theCount.getValue()); - } - } - int countMax = myDaoConfig.getPreExpandValueSetsMaxCount(); - if (count > countMax) { - ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); - count = countMax; - } + ValueSetExpansionOptions options = createValueSetExpansionOptions(myDaoConfig, theOffset, theCount, theIncludeHierarchy, theFilter); startRequest(theServletRequest); try { + IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (myDaoConfig.isPreExpandValueSets()) { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter), offset, count); - } else { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); - } + if (haveId) { + return dao.expand(theId, options, theRequestDetails); + } else if (haveIdentifier) { + if (haveValueSetVersion) { + return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), options); } else { - return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + return dao.expandByIdentifier(theUrl.getValue(), options); } } else { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter)); - } else { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); - } - } else { - return dao.expand(theValueSet, toFilterString(theFilter)); - } + return dao.expand(theValueSet, options); } } finally { endRequest(theServletRequest); } } - - private String toFilterString(StringType theFilter) { - return theFilter != null ? 
theFilter.getValue() : null; - } - - - @SuppressWarnings("unchecked") @Operation(name = JpaConstants.OPERATION_VALIDATE_CODE, idempotent = true, returnParameters = { @OperationParam(name = "result", type = BooleanType.class, min = 1), @OperationParam(name = "message", type = StringType.class), @@ -167,6 +139,42 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4 theOffset, IPrimitiveType theCount, IPrimitiveType theIncludeHierarchy, IPrimitiveType theFilter) { + int offset = theDaoConfig.getPreExpandValueSetsDefaultOffset(); + if (theOffset != null && theOffset.hasValue()) { + if (theOffset.getValue() >= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = theDaoConfig.getPreExpandValueSetsDefaultCount(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); + } + } + int countMax = theDaoConfig.getPreExpandValueSetsMaxCount(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + + ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(offset, count); + + if (theIncludeHierarchy != null && Boolean.TRUE.equals(theIncludeHierarchy.getValue())) { + options.setIncludeHierarchy(true); + } + + if (theFilter != null) { + options.setFilter(theFilter.getValue()); + } + + return options; + } private static boolean moreThanOneTrue(boolean... 
theBooleans) { boolean haveOne = false; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java index c08ca946063..6f17aa2b8c5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.provider.r5; */ import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderValueSetDstu2; @@ -29,14 +30,24 @@ import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import org.hl7.fhir.r5.model.*; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r5.model.BooleanType; +import org.hl7.fhir.r5.model.CodeType; +import org.hl7.fhir.r5.model.CodeableConcept; +import org.hl7.fhir.r5.model.Coding; +import org.hl7.fhir.r5.model.IdType; +import org.hl7.fhir.r5.model.IntegerType; +import org.hl7.fhir.r5.model.Parameters; +import org.hl7.fhir.r5.model.StringType; +import org.hl7.fhir.r5.model.UriType; +import org.hl7.fhir.r5.model.ValueSet; import javax.servlet.http.HttpServletRequest; +import static ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderValueSetR4.createValueSetExpansionOptions; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5 { - private static final org.slf4j.Logger ourLog = 
org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR5.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -48,6 +59,7 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5 theIncludeHierarchy, RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); @@ -63,56 +75,21 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5= 0) { - offset = theOffset.getValue(); - } else { - throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); - } - } - - int count = myDaoConfig.getPreExpandValueSetsDefaultCount(); - if (theCount != null && theCount.hasValue()) { - if (theCount.getValue() >= 0) { - count = theCount.getValue(); - } else { - throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); - } - } - int countMax = myDaoConfig.getPreExpandValueSetsMaxCount(); - if (count > countMax) { - ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); - count = countMax; - } + ValueSetExpansionOptions options = createValueSetExpansionOptions(myDaoConfig, theOffset, theCount, theIncludeHierarchy, theFilter); startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (myDaoConfig.isPreExpandValueSets()) { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter), offset, count); - } else { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); - } + if (haveId) { + return dao.expand(theId, options, 
theRequestDetails); + } else if (haveIdentifier) { + if (haveValueSetVersion) { + return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), options); } else { - return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + return dao.expandByIdentifier(theUrl.getValue(), options); } } else { - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - if (haveValueSetVersion) { - return dao.expandByIdentifier(theUrl.getValue() + "|" + theValueSetVersion.getValue(), toFilterString(theFilter)); - } else { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); - } - } else { - return dao.expand(theValueSet, toFilterString(theFilter)); - } + return dao.expand(theValueSet, options); } } finally { endRequest(theServletRequest); @@ -120,12 +97,6 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5 t.getParent().getId().toString()) + .collect(Collectors.joining(" ")); + Collection designations = theConcept.getDesignations(); if (StringUtils.isNotEmpty(theValueSetIncludeVersion)) { - return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem + "|" + theValueSetIncludeVersion, code, display); + return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem + "|" + theValueSetIncludeVersion, code, display, sourceConceptPid, directParentPids); } else { - return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem, code, display); + return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem, code, display, sourceConceptPid, directParentPids); } } - private void addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, boolean theAdd, String theCodeSystem, String theCodeSystemVersion, String 
theCode, String theDisplay) { + private void addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, boolean theAdd, String theCodeSystem, String theCodeSystemVersion, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { if (StringUtils.isNotEmpty(theCodeSystemVersion)) { if (isNoneBlank(theCodeSystem, theCode)) { if (theAdd && theAddedCodes.add(theCodeSystem + "|" + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem + "|" + theCodeSystemVersion, theCode, theDisplay, null); + theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem + "|" + theCodeSystemVersion, theCode, theDisplay, null, theSourceConceptPid, theSourceConceptDirectParentPids); } if (!theAdd && theAddedCodes.remove(theCodeSystem + "|" + theCode)) { @@ -272,7 +280,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } else { if (theAdd && theAddedCodes.add(theCodeSystem + "|" + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, null); + theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, null, theSourceConceptPid, theSourceConceptDirectParentPids); } if (!theAdd && theAddedCodes.remove(theCodeSystem + "|" + theCode)) { @@ -281,10 +289,10 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } - private boolean addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, Collection theDesignations, boolean theAdd, String theCodeSystem, String theCode, String theDisplay) { + private boolean addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, Collection theDesignations, boolean theAdd, String theCodeSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { if (isNoneBlank(theCodeSystem, 
theCode)) { if (theAdd && theAddedCodes.add(theCodeSystem + "|" + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, theDesignations); + theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, theDesignations, theSourceConceptPid, theSourceConceptDirectParentPids); return true; } @@ -353,10 +361,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { @Override @Transactional public List expandValueSetIntoConceptList(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { - String expansionFilter = null; // TODO: DM 2019-09-10 - This is problematic because an incorrect URL that matches ValueSet.id will not be found in the terminology tables but will yield a ValueSet here. Depending on the ValueSet, the expansion may time-out. - ValueSet expanded = expandValueSet(theExpansionOptions, theValueSetCanonicalUrl, expansionFilter); + ValueSet expanded = expandValueSet(theExpansionOptions, theValueSetCanonicalUrl); ArrayList retVal = new ArrayList<>(); for (ValueSet.ValueSetExpansionContainsComponent nextContains : expanded.getExpansion().getContains()) { @@ -367,25 +374,23 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { @Override @Transactional - public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl, @Nullable String theExpansionFilter) { + public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { ValueSet valueSet = fetchCanonicalValueSetFromCompleteContext(theValueSetCanonicalUrl); if (valueSet == null) { throw new ResourceNotFoundException("Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetCanonicalUrl)); } - return expandValueSet(theExpansionOptions, valueSet, theExpansionFilter); + return expandValueSet(theExpansionOptions, valueSet); } 
@Override @Transactional(propagation = Propagation.REQUIRED) public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand) { - return expandValueSet(theExpansionOptions, theValueSetToExpand, (String) null); - } - - @Override - @Transactional(propagation = Propagation.REQUIRED) - public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand, @Nullable String theFilter) { - return expandValueSet(theExpansionOptions, theValueSetToExpand, ExpansionFilter.fromFilterString(theFilter)); + String filter = null; + if (theExpansionOptions != null) { + filter = theExpansionOptions.getFilter(); + } + return expandValueSet(theExpansionOptions, theValueSetToExpand, ExpansionFilter.fromFilterString(filter)); } private ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, ExpansionFilter theFilter) { @@ -395,7 +400,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { int offset = expansionOptions.getOffset(); int count = expansionOptions.getCount(); - ValueSetExpansionComponentWithConceptAccumulator accumulator = new ValueSetExpansionComponentWithConceptAccumulator(myContext, count); + ValueSetExpansionComponentWithConceptAccumulator accumulator = new ValueSetExpansionComponentWithConceptAccumulator(myContext, count, expansionOptions.isIncludeHierarchy()); accumulator.setHardExpansionMaximumSize(myDaoConfig.getMaximumExpansionSize()); accumulator.setSkipCountRemaining(offset); accumulator.setIdentifier(UUID.randomUUID().toString()); @@ -424,6 +429,11 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { .setUrl(HapiExtensions.EXT_VALUESET_EXPANSION_MESSAGE) .setValue(new StringType(next)); } + + if (expansionOptions.isIncludeHierarchy()) { + accumulator.applyHierarchy(); + } + return valueSet; } @@ -469,124 +479,14 @@ public abstract class BaseTermReadSvcImpl 
implements ITermReadSvc { */ String msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion"); theAccumulator.addMessage(msg); - if (isOracleDialect()) { - expandConceptsOracle(theAccumulator, termValueSet, theFilter, theAdd); - } else { - expandConcepts(theAccumulator, termValueSet, theFilter, theAdd); - } + expandConcepts(theAccumulator, termValueSet, theFilter, theAdd, isOracleDialect()); } private boolean isOracleDialect() { return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.Oracle12cDialect; } - private void expandConceptsOracle(IValueSetConceptAccumulator theAccumulator, TermValueSet theTermValueSet, ExpansionFilter theFilter, boolean theAdd) { - // Literal copy paste from expandConcepts but tailored for Oracle since we can't reliably extend the DAO and hibernate classes - Integer offset = theAccumulator.getSkipCountRemaining(); - offset = ObjectUtils.defaultIfNull(offset, 0); - offset = Math.min(offset, theTermValueSet.getTotalConcepts().intValue()); - - Integer count = theAccumulator.getCapacityRemaining(); - count = defaultIfNull(count, myDaoConfig.getMaximumExpansionSize()); - - int conceptsExpanded = 0; - int designationsExpanded = 0; - int toIndex = offset + count; - - Collection conceptViews; - boolean wasFilteredResult = false; - String filterDisplayValue = null; - if (!theFilter.getFilters().isEmpty() && JpaConstants.VALUESET_FILTER_DISPLAY.equals(theFilter.getFilters().get(0).getProperty()) && theFilter.getFilters().get(0).getOp() == ValueSet.FilterOperator.EQUAL) { - filterDisplayValue = lowerCase(theFilter.getFilters().get(0).getValue().replace("%", "[%]")); - String displayValue = "%" + lowerCase(filterDisplayValue) + "%"; - conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); - wasFilteredResult = true; - } else { - // TODO JA HS: I'm pretty sure we are overfetching here. 
test says offset 3, count 4, but we are fetching index 3 -> 10 here, grabbing 7 concepts. - //Specifically this test testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange - conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); - theAccumulator.consumeSkipCount(offset); - if (theAdd) { - theAccumulator.incrementOrDecrementTotalConcepts(true, theTermValueSet.getTotalConcepts().intValue()); - } - } - - if (conceptViews.isEmpty()) { - logConceptsExpanded("No concepts to expand. ", theTermValueSet, conceptsExpanded); - return; - } - - Map pidToConcept = new LinkedHashMap<>(); - ArrayListMultimap pidToDesignations = ArrayListMultimap.create(); - - for (TermValueSetConceptViewOracle conceptView : conceptViews) { - - String system = conceptView.getConceptSystemUrl(); - String code = conceptView.getConceptCode(); - String display = conceptView.getConceptDisplay(); - - //-- this is quick solution, may need to revisit - if (!applyFilter(display, filterDisplayValue)) - continue; - - Long conceptPid = conceptView.getConceptPid(); - if (!pidToConcept.containsKey(conceptPid)) { - FhirVersionIndependentConcept concept = new FhirVersionIndependentConcept(system, code, display); - pidToConcept.put(conceptPid, concept); - } - - // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to designations optional. 
- if (conceptView.getDesignationPid() != null) { - TermConceptDesignation designation = new TermConceptDesignation(); - designation.setUseSystem(conceptView.getDesignationUseSystem()); - designation.setUseCode(conceptView.getDesignationUseCode()); - designation.setUseDisplay(conceptView.getDesignationUseDisplay()); - designation.setValue(conceptView.getDesignationVal()); - designation.setLanguage(conceptView.getDesignationLang()); - pidToDesignations.put(conceptPid, designation); - - if (++designationsExpanded % 250 == 0) { - logDesignationsExpanded("Expansion of designations in progress. ", theTermValueSet, designationsExpanded); - } - } - - if (++conceptsExpanded % 250 == 0) { - logConceptsExpanded("Expansion of concepts in progress. ", theTermValueSet, conceptsExpanded); - } - } - - for (Long nextPid : pidToConcept.keySet()) { - FhirVersionIndependentConcept concept = pidToConcept.get(nextPid); - List designations = pidToDesignations.get(nextPid); - String system = concept.getSystem(); - String code = concept.getCode(); - String display = concept.getDisplay(); - - if (theAdd) { - if (theAccumulator.getCapacityRemaining() != null) { - if (theAccumulator.getCapacityRemaining() == 0) { - break; - } - } - - theAccumulator.includeConceptWithDesignations(system, code, display, designations); - } else { - boolean removed = theAccumulator.excludeConcept(system, code); - if (removed) { - theAccumulator.incrementOrDecrementTotalConcepts(false, 1); - } - } - } - - if (wasFilteredResult && theAdd) { - theAccumulator.incrementOrDecrementTotalConcepts(true, pidToConcept.size()); - } - - logDesignationsExpanded("Finished expanding designations. ", theTermValueSet, designationsExpanded); - logConceptsExpanded("Finished expanding concepts. 
", theTermValueSet, conceptsExpanded); - } - - private void expandConcepts(IValueSetConceptAccumulator theAccumulator, TermValueSet theTermValueSet, ExpansionFilter theFilter, boolean theAdd) { + private void expandConcepts(IValueSetConceptAccumulator theAccumulator, TermValueSet theTermValueSet, ExpansionFilter theFilter, boolean theAdd, boolean theOracle) { // NOTE: if you modifiy the logic here, look to `expandConceptsOracle` and see if your new code applies to its copy pasted sibling Integer offset = theAccumulator.getSkipCountRemaining(); offset = ObjectUtils.defaultIfNull(offset, 0); @@ -599,19 +499,26 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { int designationsExpanded = 0; int toIndex = offset + count; - Collection conceptViews; - Collection conceptViewsOracle; + Collection conceptViews; boolean wasFilteredResult = false; String filterDisplayValue = null; if (!theFilter.getFilters().isEmpty() && JpaConstants.VALUESET_FILTER_DISPLAY.equals(theFilter.getFilters().get(0).getProperty()) && theFilter.getFilters().get(0).getOp() == ValueSet.FilterOperator.EQUAL) { filterDisplayValue = lowerCase(theFilter.getFilters().get(0).getValue().replace("%", "[%]")); String displayValue = "%" + lowerCase(filterDisplayValue) + "%"; - conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); + if (theOracle) { + conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); + } else { + conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); + } wasFilteredResult = true; } else { // TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching index 3 -> 10 here, grabbing 7 concepts. 
//Specifically this test testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange - conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); + if (theOracle) { + conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); + } else { + conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); + } theAccumulator.consumeSkipCount(offset); if (theAdd) { theAccumulator.incrementOrDecrementTotalConcepts(true, theTermValueSet.getTotalConcepts().intValue()); @@ -625,8 +532,10 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { Map pidToConcept = new LinkedHashMap<>(); ArrayListMultimap pidToDesignations = ArrayListMultimap.create(); + Map pidToSourcePid = new HashMap<>(); + Map pidToSourceDirectParentPids = new HashMap<>(); - for (TermValueSetConceptView conceptView : conceptViews) { + for (ITermValueSetConceptView conceptView : conceptViews) { String system = conceptView.getConceptSystemUrl(); String code = conceptView.getConceptCode(); @@ -657,6 +566,11 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } + if (theAccumulator.isTrackingHierarchy()) { + pidToSourcePid.put(conceptPid, conceptView.getSourceConceptPid()); + pidToSourceDirectParentPids.put(conceptPid, conceptView.getSourceConceptDirectParentPids()); + } + if (++conceptsExpanded % 250 == 0) { logConceptsExpanded("Expansion of concepts in progress. 
", theTermValueSet, conceptsExpanded); } @@ -676,7 +590,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } - theAccumulator.includeConceptWithDesignations(system, code, display, designations); + Long sourceConceptPid = pidToSourcePid.get(nextPid); + String sourceConceptDirectParentPids = pidToSourceDirectParentPids.get(nextPid); + theAccumulator.includeConceptWithDesignations(system, code, display, designations, sourceConceptPid, sourceConceptDirectParentPids); } else { boolean removed = theAccumulator.excludeConcept(system, code); if (removed) { @@ -1158,7 +1074,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { private void addOrRemoveCode(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, boolean theAdd, String theSystem, String theCode, String theDisplay) { if (theAdd && theAddedCodes.add(theSystem + "|" + theCode)) { - theValueSetCodeAccumulator.includeConcept(theSystem, theCode, theDisplay); + theValueSetCodeAccumulator.includeConcept(theSystem, theCode, theDisplay, null, null); } if (!theAdd && theAddedCodes.remove(theSystem + "|" + theCode)) { theValueSetCodeAccumulator.excludeConcept(theSystem, theCode); @@ -1506,7 +1422,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { if (theInclude.getConcept().isEmpty()) { for (TermConcept next : theVersion.getConcepts()) { - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), next.getCode(), next.getDisplay()); + addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), next.getCode(), next.getDisplay(), next.getId(), next.getParentPidsAsString()); } } @@ -1514,7 +1430,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { if (!theSystem.equals(theInclude.getSystem()) && isNotBlank(theSystem)) { continue; } - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, 
theInclude.getVersion(), next.getCode(), next.getDisplay()); + addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), next.getCode(), next.getDisplay(), null, null); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java index 869871e4313..9a60a5e9b95 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java @@ -29,9 +29,9 @@ public interface IValueSetConceptAccumulator { void addMessage(String theMessage); - void includeConcept(String theSystem, String theCode, String theDisplay); + void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids); - void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, @Nullable Collection theDesignations); + void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, @Nullable Collection theDesignations, Long theSourceConceptPid, String theSourceConceptDirectParentPids); /** * @return Returns true if the code was actually present and was removed @@ -48,6 +48,10 @@ public interface IValueSetConceptAccumulator { return null; } + default boolean isTrackingHierarchy() { + return true; + } + @Nullable default void consumeSkipCount(int theSkipCountToConsume) { // nothing diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index b0231fab16b..1cc63e1d8dd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -66,13 +66,13 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { } @Override - public void includeConcept(String theSystem, String theCode, String theDisplay) { - saveConcept(theSystem, theCode, theDisplay); + public void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { + saveConcept(theSystem, theCode, theDisplay, theSourceConceptPid, theSourceConceptDirectParentPids); } @Override - public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations) { - TermValueSetConcept concept = saveConcept(theSystem, theCode, theDisplay); + public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { + TermValueSetConcept concept = saveConcept(theSystem, theCode, theDisplay, theSourceConceptPid, theSourceConceptDirectParentPids); if (theDesignations != null) { for (TermConceptDesignation designation : theDesignations) { saveConceptDesignation(concept, designation); @@ -117,7 +117,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { return false; } - private TermValueSetConcept saveConcept(String theSystem, String theCode, String theDisplay) { + private TermValueSetConcept saveConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "ValueSet contains a concept with no system value"); ValidateUtil.isNotBlankOrThrowInvalidRequest(theCode, "ValueSet contains a concept with no code value"); @@ -135,6 +135,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { if (isNotBlank(theDisplay)) { 
concept.setDisplay(theDisplay); } + + concept.setSourceConceptPid(theSourceConceptPid); + concept.setSourceConceptDirectParentPids(theSourceConceptDirectParentPids); + myValueSetConceptDao.save(concept); myValueSetDao.save(myTermValueSet.incrementTotalConcepts()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java index d368800ea81..353f44c3911 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.term; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import ca.uhn.fhir.jpa.term.ex.ExpansionTooCostlyException; import ca.uhn.fhir.model.api.annotation.Block; @@ -32,9 +31,15 @@ import org.hl7.fhir.r4.model.ValueSet; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; @Block() public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent implements IValueSetConceptAccumulator { @@ -45,25 +50,21 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V private List myMessages; private int myAddedConcepts; private Integer myTotalConcepts; - - /** - * Constructor - * - * @param theDaoConfig Will be used to determine the max capacity for this accumulator - */ - public 
ValueSetExpansionComponentWithConceptAccumulator(FhirContext theContext, DaoConfig theDaoConfig) { - this(theContext, theDaoConfig.getMaximumExpansionSize()); - } + private Map mySourcePidToConcept = new HashMap<>(); + private Map myConceptToSourceDirectParentPids = new HashMap<>(); + private boolean myTrackingHierarchy; /** * Constructor * * @param theMaxCapacity The maximum number of results this accumulator will accept before throwing * an {@link InternalErrorException} + * @param theTrackingHierarchy */ - ValueSetExpansionComponentWithConceptAccumulator(FhirContext theContext, int theMaxCapacity) { + ValueSetExpansionComponentWithConceptAccumulator(FhirContext theContext, int theMaxCapacity, boolean theTrackingHierarchy) { myMaxCapacity = theMaxCapacity; myContext = theContext; + myTrackingHierarchy = theTrackingHierarchy; } @Nonnull @@ -79,6 +80,11 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V return Collections.unmodifiableList(myMessages); } + @Override + public boolean isTrackingHierarchy() { + return myTrackingHierarchy; + } + @Override public void addMessage(String theMessage) { if (myMessages == null) { @@ -88,7 +94,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } @Override - public void includeConcept(String theSystem, String theCode, String theDisplay) { + public void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) { if (mySkipCountRemaining > 0) { mySkipCountRemaining--; return; @@ -103,7 +109,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } @Override - public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations) { + public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations, Long theSourceConceptPid, String 
theSourceConceptDirectParentPids) { if (mySkipCountRemaining > 0) { mySkipCountRemaining--; return; @@ -112,6 +118,14 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V incrementConceptsCount(); ValueSet.ValueSetExpansionContainsComponent contains = this.addContains(); + + if (theSourceConceptPid != null) { + mySourcePidToConcept.put(theSourceConceptPid, contains); + } + if (theSourceConceptDirectParentPids != null) { + myConceptToSourceDirectParentPids.put(contains, theSourceConceptDirectParentPids); + } + setSystemAndVersion(theSystem, contains); contains.setCode(theCode); contains.setDisplay(theDisplay); @@ -215,4 +229,29 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V public void setHardExpansionMaximumSize(int theHardExpansionMaximumSize) { myHardExpansionMaximumSize = theHardExpansionMaximumSize; } + + public void applyHierarchy() { + for (int i = 0; i < this.getContains().size(); i++) { + ValueSet.ValueSetExpansionContainsComponent nextContains = this.getContains().get(i); + + String directParentPidsString = myConceptToSourceDirectParentPids.get(nextContains); + if (isNotBlank(directParentPidsString)) { + List directParentPids = Arrays.stream(directParentPidsString.split(" ")).map(t -> Long.parseLong(t)).collect(Collectors.toList()); + + boolean firstMatch = false; + for (Long next : directParentPids) { + ValueSet.ValueSetExpansionContainsComponent parentConcept = mySourcePidToConcept.get(next); + if (parentConcept != null) { + if (!firstMatch) { + firstMatch = true; + this.getContains().remove(i); + i--; + } + + parentConcept.addContains(nextContains); + } + } + } + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java index 94e482fddba..0f04f62bd8e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java @@ -58,12 +58,10 @@ import java.util.Set; */ public interface ITermReadSvc extends IValidationSupport { - ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl, @Nullable String theExpansionFilter); + ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl); ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand); - ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand, @Nullable String theFilter); - void expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); /** diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java index a4569557dd1..8b8289a3980 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.dstu2; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; import ca.uhn.fhir.model.dstu2.composite.CodingDt; @@ -180,7 +181,7 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test { * Filter with display name */ - expanded = myValueSetDao.expand(myExtensionalVsId, ("systolic"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new 
ValueSetExpansionOptions().setFilter("systolic"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -193,7 +194,7 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test { * Filter with code */ - expanded = myValueSetDao.expand(myExtensionalVsId, ("11378"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new ValueSetExpansionOptions().setFilter("11378"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -205,7 +206,7 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test { @Test public void testExpandByIdentifier() { - ValueSet expanded = myValueSetDao.expandByIdentifier("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", "11378"); + ValueSet expanded = myValueSetDao.expandByIdentifier("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", new ValueSetExpansionOptions().setFilter("11378")); String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -220,7 +221,7 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test { @Test public void testExpandByValueSet() throws IOException { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-2.xml"); - ValueSet expanded = myValueSetDao.expand(toExpand, "11378"); + ValueSet expanded = myValueSetDao.expand(toExpand, new ValueSetExpansionOptions().setFilter("11378")); String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -235,12 +236,12 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test { @Test public void testValidateCodeForCodeSystemOperationNotSupported() { try { - ((IFhirResourceDaoCodeSystem)myValueSetDao).validateCode(null, null, null, null, null, 
null, null, null); + ((IFhirResourceDaoCodeSystem) myValueSetDao).validateCode(null, null, null, null, null, null, null, null); fail(); } catch (UnsupportedOperationException theE) { assertNotNull(theE); } } - + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java index 70d144cbe00..8dac2aa1096 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java @@ -1,16 +1,17 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.parser.IParser; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParamModifier; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -18,7 +19,6 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; -import ca.uhn.fhir.util.TestUtil; import 
ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; @@ -38,7 +38,6 @@ import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator; import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent; import org.hl7.fhir.instance.model.api.IIdType; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -48,26 +47,27 @@ import java.util.ArrayList; import java.util.List; import java.util.Set; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsStringIgnoringCase; import static org.hamcrest.Matchers.empty; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.fail; public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3TerminologyTest.class); public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system"; public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set"; - + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3TerminologyTest.class); @Autowired private CachingValidationSupport myCachingValidationSupport; + @Autowired + private ITermDeferredStorageSvc myTermDeferredStorageSvc; @AfterEach public void after() { myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize()); - + TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false); } @@ -138,7 +138,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { 
cs.getConcepts().add(parentA); for (int i = 0; i < 450; i++) { - TermConcept childI = new TermConcept(cs, "subCodeA"+i).setDisplay("Sub-code A"+i); + TermConcept childI = new TermConcept(cs, "subCodeA" + i).setDisplay("Sub-code A" + i); parentA.addChild(childI, RelationshipTypeEnum.ISA); } @@ -146,18 +146,15 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { cs.getConcepts().add(parentB); for (int i = 0; i < 450; i++) { - TermConcept childI = new TermConcept(cs, "subCodeB"+i).setDisplay("Sub-code B"+i); + TermConcept childI = new TermConcept(cs, "subCodeB" + i).setDisplay("Sub-code B" + i); parentB.addChild(childI, RelationshipTypeEnum.ISA); } - myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION" , cs, table); + myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table); myTermDeferredStorageSvc.saveAllDeferred(); } - @Autowired - private ITermDeferredStorageSvc myTermDeferredStorageSvc; - private void createExternalCsAndLocalVs() { CodeSystem codeSystem = createExternalCs(); @@ -181,17 +178,17 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { TermConcept goodbye = new TermConcept(cs, "goodbye").setDisplay("Goodbye"); cs.getConcepts().add(goodbye); - + TermConcept dogs = new TermConcept(cs, "dogs").setDisplay("Dogs"); cs.getConcepts().add(dogs); - + TermConcept labrador = new TermConcept(cs, "labrador").setDisplay("Labrador"); dogs.addChild(labrador, RelationshipTypeEnum.ISA); TermConcept beagle = new TermConcept(cs, "beagle").setDisplay("Beagle"); dogs.addChild(beagle, RelationshipTypeEnum.ISA); - myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM,"SYSTEM NAME", "SYSTEM VERSION" , cs, table); + myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new 
ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table); return codeSystem; } @@ -199,17 +196,17 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { //@formatter:off CodeSystem codeSystem = new CodeSystem(); codeSystem.setUrl(URL_MY_CODE_SYSTEM); - codeSystem.setContent(CodeSystemContentMode.COMPLETE); + codeSystem.setContent(CodeSystemContentMode.COMPLETE); codeSystem .addConcept().setCode("A").setDisplay("Code A") - .addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA") - .addConcept(new ConceptDefinitionComponent().setCode("AAA").setDisplay("Code AAA")) - ) - .addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB")); + .addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA") + .addConcept(new ConceptDefinitionComponent().setCode("AAA").setDisplay("Code AAA")) + ) + .addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB")); codeSystem .addConcept().setCode("B").setDisplay("Code B") - .addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code BA")) - .addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code BB")); + .addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code BA")) + .addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code BB")); //@formatter:on myCodeSystemDao.create(codeSystem, mySrd); @@ -262,15 +259,15 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { //@formatter:off CodeSystem codeSystem = new CodeSystem(); codeSystem.setUrl(URL_MY_CODE_SYSTEM); - codeSystem.setContent(CodeSystemContentMode.COMPLETE); + codeSystem.setContent(CodeSystemContentMode.COMPLETE); codeSystem .addConcept().setCode("A").setDisplay("Code A") - .addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA")) - .addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code 
AB")); + .addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA")) + .addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB")); codeSystem .addConcept().setCode("B").setDisplay("Code A") - .addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code AA")) - .addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code AB")); + .addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code AA")) + .addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code AB")); //@formatter:on IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified(); @@ -306,20 +303,20 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { valueSet.setUrl(URL_MY_VALUE_SET); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()) - .addConcept(new ConceptReferenceComponent().setCode("hello")) - .addConcept(new ConceptReferenceComponent().setCode("goodbye")); + .setSystem(codeSystem.getUrl()) + .addConcept(new ConceptReferenceComponent().setCode("hello")) + .addConcept(new ConceptReferenceComponent().setCode("goodbye")); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()) - .addFilter() - .setProperty("concept") - .setOp(FilterOperator.ISA) - .setValue("dogs"); - + .setSystem(codeSystem.getUrl()) + .addFilter() + .setProperty("concept") + .setOp(FilterOperator.ISA) + .setValue("dogs"); + myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, ""); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("")); logAndValidateValueSet(result); assertEquals(4, result.getExpansion().getTotal()); @@ -332,13 +329,13 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { @Disabled @Test public void testExpandWithOpEquals() { - - - ValueSet result = myValueSetDao.expandByIdentifier("http://hl7.org/fhir/ValueSet/doc-typecodes", ""); + + 
+ ValueSet result = myValueSetDao.expandByIdentifier("http://hl7.org/fhir/ValueSet/doc-typecodes", new ValueSetExpansionOptions().setFilter("")); ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(result)); } - - + + @Test public void testExpandWithCodesAndDisplayFilterPartialOnFilter() { CodeSystem codeSystem = createExternalCsDogs(); @@ -347,20 +344,20 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { valueSet.setUrl(URL_MY_VALUE_SET); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()) - .addConcept(new ConceptReferenceComponent().setCode("hello")) - .addConcept(new ConceptReferenceComponent().setCode("goodbye")); + .setSystem(codeSystem.getUrl()) + .addConcept(new ConceptReferenceComponent().setCode("hello")) + .addConcept(new ConceptReferenceComponent().setCode("goodbye")); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()) - .addFilter() - .setProperty("concept") - .setOp(FilterOperator.ISA) - .setValue("dogs"); - + .setSystem(codeSystem.getUrl()) + .addFilter() + .setProperty("concept") + .setOp(FilterOperator.ISA) + .setValue("dogs"); + myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "lab"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("lab")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -377,20 +374,20 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { valueSet.setUrl(URL_MY_VALUE_SET); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()) - .addConcept(new ConceptReferenceComponent().setCode("hello")) - .addConcept(new ConceptReferenceComponent().setCode("goodbye")); + .setSystem(codeSystem.getUrl()) + .addConcept(new ConceptReferenceComponent().setCode("hello")) + .addConcept(new ConceptReferenceComponent().setCode("goodbye")); valueSet.getCompose() .addInclude() - 
.setSystem(codeSystem.getUrl()) - .addFilter() - .setProperty("concept") - .setOp(FilterOperator.ISA) - .setValue("dogs"); - + .setSystem(codeSystem.getUrl()) + .addFilter() + .setProperty("concept") + .setOp(FilterOperator.ISA) + .setValue("dogs"); + myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "hel"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("hel")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -408,7 +405,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { valueSet.getCompose().addInclude().setSystem(codeSystem.getUrl()); myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "lab"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("lab")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -512,7 +509,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { @Test public void testExpandWithIsAInExternalValueSetReindex() { TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(true); - + createExternalCsAndLocalVs(); // We're making sure that a reindex doesn't wipe out all of the @@ -524,7 +521,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { myTerminologyDeferredStorageSvc.saveDeferred(); myTerminologyDeferredStorageSvc.saveDeferred(); - IValidationSupport.LookupCodeResult lookupResults = myCodeSystemDao.lookupCode(new StringType("childAA"), new StringType(URL_MY_CODE_SYSTEM),null, mySrd); + IValidationSupport.LookupCodeResult lookupResults = myCodeSystemDao.lookupCode(new StringType("childAA"), new StringType(URL_MY_CODE_SYSTEM), null, mySrd); assertEquals(true, lookupResults.isFound()); ValueSet vs = new ValueSet(); @@ -569,7 +566,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { 
assertEquals("Unknown CodeSystem URI \"http://example.com/my_code_systemAA\" referenced from ValueSet", e.getMessage()); } } - + @Test public void testExpandWithSystemAndCodesInExternalValueSet() { createExternalCsAndLocalVs(); @@ -628,9 +625,9 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { valueSet.setUrl(URL_MY_VALUE_SET); valueSet.getCompose() .addInclude() - .setSystem(codeSystem.getUrl()); + .setSystem(codeSystem.getUrl()); - ValueSet result = myValueSetDao.expand(valueSet, ""); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("")); logAndValidateValueSet(result); assertEquals(5, result.getExpansion().getTotal()); @@ -711,7 +708,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { cs.setResource(table); TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A"); cs.getConcepts().add(parentA); - myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), "http://snomed.info/sct", "Snomed CT", "SYSTEM VERSION" , cs, table); + myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), "http://snomed.info/sct", "Snomed CT", "SYSTEM VERSION", cs, table); StringType code = new StringType("ParentA"); StringType system = new StringType("http://snomed.info/sct"); @@ -767,7 +764,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { myResourceReindexingSvc.forceReindexingPass(); myTerminologyDeferredStorageSvc.saveDeferred(); myTerminologyDeferredStorageSvc.saveDeferred(); - + // Again myResourceReindexingSvc.markAllResourcesForReindexing(); myResourceReindexingSvc.forceReindexingPass(); @@ -917,7 +914,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { } - + @Test public void testSearchCodeBelowLocalCodesystem() { createLocalCsAndVs(); @@ -1018,7 +1015,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test 
{ SearchParameterMap params; ourLog.info("testSearchCodeInEmptyValueSet without status"); - + params = new SearchParameterMap(); params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN)); assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty()); @@ -1029,7 +1026,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN)); params.add(Observation.SP_STATUS, new TokenParam(null, "final")); assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty()); - + ourLog.info("testSearchCodeInEmptyValueSet done"); } @@ -1125,12 +1122,12 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test { SearchParameterMap params; ourLog.info("testSearchCodeInEmptyValueSet without status"); - + params = new SearchParameterMap(); params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN)); try { myObservationDao.search(params); - } catch(InvalidRequestException e) { + } catch (InvalidRequestException e) { assertEquals("Unable to expand imported value set: Unable to find imported value set http://non_existant_VS", e.getMessage()); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValueSetTest.java index 9500b926ac7..a12be5c911a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValueSetTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import 
ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; @@ -99,7 +100,7 @@ public class FhirResourceDaoDstu3ValueSetTest extends BaseJpaDstu3Test { private boolean clearDeferredStorageQueue() { - if(!myTerminologyDeferredStorageSvc.isStorageQueueEmpty()) { + if (!myTerminologyDeferredStorageSvc.isStorageQueueEmpty()) { myTerminologyDeferredStorageSvc.saveAllDeferred(); return false; } else { @@ -155,7 +156,7 @@ public class FhirResourceDaoDstu3ValueSetTest extends BaseJpaDstu3Test { * Filter with display name */ - expanded = myValueSetDao.expand(myExtensionalVsId, ("systolic"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new ValueSetExpansionOptions().setFilter("systolic"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -169,7 +170,7 @@ public class FhirResourceDaoDstu3ValueSetTest extends BaseJpaDstu3Test { @Test @Disabled public void testExpandByIdentifier() { - ValueSet expanded = myValueSetDao.expandByIdentifier("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", "11378"); + ValueSet expanded = myValueSetDao.expandByIdentifier("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", new ValueSetExpansionOptions().setFilter("11378")); String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off @@ -188,7 +189,7 @@ public class FhirResourceDaoDstu3ValueSetTest extends BaseJpaDstu3Test { @Disabled public void testExpandByValueSet() throws IOException { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-3-vs.xml"); - ValueSet expanded = myValueSetDao.expand(toExpand, "11378"); + ValueSet expanded = myValueSetDao.expand(toExpand, new ValueSetExpansionOptions().setFilter("11378")); String resp = 
myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java index a35337b8a16..7e71ceab8ad 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java @@ -11,6 +11,7 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.test.utilities.docker.RequiresDocker; import org.hamcrest.Matchers; import org.hl7.fhir.instance.model.api.IIdType; @@ -169,7 +170,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest { ValueSet.ConceptSetComponent include = vs.getCompose().addInclude(); include.setSystem(URL_MY_CODE_SYSTEM); - ValueSet result = myValueSetDao.expand(vs, "child"); + ValueSet result = myValueSetDao.expand(vs, new ValueSetExpansionOptions().setFilter("child")); logAndValidateValueSet(result); @@ -187,7 +188,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest { ValueSet.ConceptSetComponent include = vs.getCompose().addInclude(); include.setSystem(URL_MY_CODE_SYSTEM); - ValueSet result = myValueSetDao.expand(vs, "chi"); + ValueSet result = myValueSetDao.expand(vs, new ValueSetExpansionOptions().setFilter("chi")); logAndValidateValueSet(result); @@ -205,7 +206,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest { ValueSet.ConceptSetComponent include = vs.getCompose().addInclude(); include.setSystem(URL_MY_CODE_SYSTEM); - ValueSet result = myValueSetDao.expand(vs, "hil"); + ValueSet result = 
myValueSetDao.expand(vs, new ValueSetExpansionOptions().setFilter("hil")); logAndValidateValueSet(result); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java index 5c653decb9a..661a4776d23 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; @@ -344,7 +345,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, ""); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("")); logAndValidateValueSet(result); assertEquals(2, result.getExpansion().getTotal()); @@ -379,7 +380,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, ""); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("")); logAndValidateValueSet(result); assertEquals(4, result.getExpansion().getTotal()); @@ -409,7 +410,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "hel"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("hel")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -427,7 +428,7 
@@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { valueSet.getCompose().addInclude().setSystem(codeSystem.getUrl()); myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "lab"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("lab")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -457,7 +458,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { myValueSetDao.create(valueSet, mySrd); - ValueSet result = myValueSetDao.expand(valueSet, "lab"); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("lab")); logAndValidateValueSet(result); assertEquals(1, result.getExpansion().getTotal()); @@ -677,7 +678,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { public void testExpandWithOpEquals() { - ValueSet result = myValueSetDao.expandByIdentifier("http://hl7.org/fhir/ValueSet/doc-typecodes", ""); + ValueSet result = myValueSetDao.expandByIdentifier("http://hl7.org/fhir/ValueSet/doc-typecodes", new ValueSetExpansionOptions().setFilter("")); ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(result)); } @@ -795,7 +796,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { .addInclude() .setSystem(codeSystem.getUrl()); - ValueSet result = myValueSetDao.expand(valueSet, ""); + ValueSet result = myValueSetDao.expand(valueSet, new ValueSetExpansionOptions().setFilter("")); logAndValidateValueSet(result); assertEquals(5, result.getExpansion().getTotal()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java index c531a58cd9f..7a702e63b49 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; @@ -258,8 +259,6 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { obs.setEffective(DateTimeType.now()); obs.setStatus(ObservationStatus.FINAL); - OperationOutcome oo; - // Valid code obs.setValue(new Quantity().setSystem("http://cs").setCode("code1").setValue(123)); try { @@ -1702,7 +1701,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { myTermReadSvc.preExpandDeferredValueSetsToTerminologyTables(); - ValueSet expansion = myValueSetDao.expand(id, null, 0, 10000, mySrd); + ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(0, 10000); + ValueSet expansion = myValueSetDao.expand(id, options, mySrd); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); assertEquals(2, expansion.getExpansion().getContains().size()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java index 1311279136b..03ee310a43f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; import 
ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -221,7 +222,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test { * Filter with display name */ - expanded = myValueSetDao.expand(myExtensionalVsId, ("systolic"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new ValueSetExpansionOptions().setFilter("systolic"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java index 2172ae6d808..a8b1dab9229 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.r5; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; @@ -226,7 +227,7 @@ public class FhirResourceDaoR5ValueSetTest extends BaseJpaR5Test { * Filter with display name */ - expanded = myValueSetDao.expand(myExtensionalVsId, ("systolic"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new ValueSetExpansionOptions().setFilter("systolic"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); //@formatter:off diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2ValueSetTest.java index c577f3abc27..5398279ba6f 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2ValueSetTest.java @@ -1,21 +1,6 @@ package ca.uhn.fhir.jpa.provider; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.stringContainsInOrder; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.fail; - -import java.io.IOException; - -import org.hl7.fhir.instance.model.api.IIdType; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.springframework.transaction.annotation.Transactional; - +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.model.dstu2.composite.CodingDt; import ca.uhn.fhir.model.dstu2.resource.Parameters; import ca.uhn.fhir.model.dstu2.resource.ValueSet; @@ -24,12 +9,25 @@ import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.util.TestUtil; +import org.hl7.fhir.instance.model.api.IIdType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.transaction.annotation.Transactional; + +import java.io.IOException; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.stringContainsInOrder; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; public class 
ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2Test { - private IIdType myExtensionalVsId; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderDstu2ValueSetTest.class); + private IIdType myExtensionalVsId; @BeforeEach @Transactional @@ -38,7 +36,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 upload.setId(""); myExtensionalVsId = myValueSetDao.create(upload, mySrd).getId().toUnqualifiedVersionless(); } - + @Test public void testValidateCodeOperationByCodeAndSystemInstance() { Parameters respParam = ourClient @@ -51,7 +49,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam); ourLog.info(resp); - + assertEquals(new BooleanDt(true), respParam.getParameter().get(0).getValue()); } @@ -67,7 +65,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam); ourLog.info(resp); - + assertEquals("name", respParam.getParameter().get(0).getName()); assertEquals(new StringDt("Unknown"), respParam.getParameter().get(0).getValue()); assertEquals("display", respParam.getParameter().get(1).getName()); @@ -75,7 +73,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 assertEquals("abstract", respParam.getParameter().get(2).getName()); assertEquals(new BooleanDt(false), respParam.getParameter().get(2).getValue()); } - + @Test @Disabled public void testLookupOperationForBuiltInCode() { @@ -89,7 +87,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam); ourLog.info(resp); - + assertEquals("name", respParam.getParameter().get(0).getName()); assertEquals(new StringDt("Unknown"), 
respParam.getParameter().get(0).getValue()); assertEquals("display", respParam.getParameter().get(1).getName()); @@ -109,7 +107,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam); ourLog.info(resp); - + assertEquals("name", respParam.getParameter().get(0).getName()); assertEquals(new StringDt("Unknown"), respParam.getParameter().get(0).getValue()); assertEquals("display", respParam.getParameter().get(1).getName()); @@ -179,20 +177,20 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); assertThat(resp, - stringContainsInOrder("", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - )); + stringContainsInOrder("", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + )); /* * Filter with display name @@ -206,12 +204,12 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 .execute(); expanded = (ValueSet) respParam.getParameter().get(0).getResource(); - expanded = myValueSetDao.expand(myExtensionalVsId, ("systolic"), mySrd); + expanded = myValueSetDao.expand(myExtensionalVsId, new ValueSetExpansionOptions().setFilter("systolic"), mySrd); resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); assertThat(resp, stringContainsInOrder( - "", - "")); + "", + "")); /* * Filter with code @@ -227,10 +225,10 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); assertThat(resp, stringContainsInOrder( - "", - "")); + "", + "")); } - + @Test public void testExpandByIdentifier() { Parameters respParam = ourClient @@ -245,8 +243,8 @@ public class 
ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); assertThat(resp, stringContainsInOrder( - "", - "")); + "", + "")); assertThat(resp, not(containsString(""))); } @@ -254,7 +252,7 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 @Test public void testExpandByValueSet() throws IOException { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-2.xml"); - + Parameters respParam = ourClient .operation() .onType(ValueSet.class) @@ -267,8 +265,8 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); assertThat(resp, stringContainsInOrder( - "", - "")); + "", + "")); assertThat(resp, not(containsString(""))); } @@ -316,5 +314,5 @@ public class ResourceProviderDstu2ValueSetTest extends BaseResourceProviderDstu2 } } - + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java index 500eba52486..85831734d67 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java @@ -8,9 +8,9 @@ import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; -import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; 
+import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; @@ -38,6 +38,7 @@ import org.hl7.fhir.r4.model.IntegerType; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.UriType; +import org.hl7.fhir.r4.model.UrlType; import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.r4.model.ValueSet.FilterOperator; @@ -53,12 +54,16 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_CODE_SYSTEM; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_VALUE_SET; +import static ca.uhn.fhir.util.HapiExtensions.EXT_VALUESET_EXPANSION_MESSAGE; import static org.awaitility.Awaitility.await; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsStringIgnoringCase; import static org.hamcrest.Matchers.is; @@ -1037,6 +1042,165 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv testValidateCodeOperationByCodeAndSystemInstanceOnInstance(); } + + @Test + public void testExpandUsingHierarchy_PreStored_NotPreCalculated() { + createLocalCs(); + createHierarchicalVs(); + + myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless(); + + ValueSet expansion; + + // Non-hierarchical + myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + 
.named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A", "AA", "AB", "AAA")); + assertEquals(19, myCaptureQueriesListener.getSelectQueries().size()); + assertEquals("ValueSet \"ValueSet.url[http://example.com/my_value_set]\" has not yet been pre-expanded. Performing in-memory expansion without parameters. Current status: NOT_EXPANDED | The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE)); + + // Hierarchical + myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) + .andParameter(JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, new BooleanType("true")) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains()), containsInAnyOrder("AA", "AB")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains().stream().filter(t->t.getCode().equals("AA")).findFirst().orElseThrow(()->new IllegalArgumentException()).getContains()), containsInAnyOrder("AAA")); + assertEquals(16, myCaptureQueriesListener.getSelectQueries().size()); + + } + + @Test + public void testExpandUsingHierarchy_NotPreStored() { + createLocalCs(); + createHierarchicalVs(); + myLocalVs.setUrl(null); + + ValueSet expansion; + + // Non-hierarchical + 
myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "valueSet", myLocalVs) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A", "AA", "AB", "AAA")); + assertEquals(15, myCaptureQueriesListener.getSelectQueries().size()); + assertEquals(null, expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE)); + + // Hierarchical + myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "valueSet", myLocalVs) + .andParameter(JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, new BooleanType("true")) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains()), containsInAnyOrder("AA", "AB")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains().stream().filter(t->t.getCode().equals("AA")).findFirst().orElseThrow(()->new IllegalArgumentException()).getContains()), containsInAnyOrder("AAA")); + assertEquals(14, myCaptureQueriesListener.getSelectQueries().size()); + + } + + @Test + public void testExpandUsingHierarchy_PreStored_PreCalculated() { + createLocalCs(); + createHierarchicalVs(); + + myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless(); + + ValueSet expansion; + + myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + + // Do a warm-up pass to precache anything that can be pre-cached + myClient + 
.operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) + .returnResourceType(ValueSet.class) + .execute(); + + // Non-hierarchical + myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A", "AA", "AB", "AAA")); + assertEquals(3, myCaptureQueriesListener.getSelectQueries().size()); + assertEquals("ValueSet was expanded using a pre-calculated expansion", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE)); + + // Hierarchical + myCaptureQueriesListener.clear(); + expansion = myClient + .operation() + .onType("ValueSet") + .named(JpaConstants.OPERATION_EXPAND) + .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) + .andParameter(JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, new BooleanType("true")) + .returnResourceType(ValueSet.class) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); + assertThat(toDirectCodes(expansion.getExpansion().getContains()), containsInAnyOrder("A")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains()), containsInAnyOrder("AA", "AB")); + assertThat(toDirectCodes(expansion.getExpansion().getContains().get(0).getContains().stream().filter(t->t.getCode().equals("AA")).findFirst().orElseThrow(()->new IllegalArgumentException()).getContains()), containsInAnyOrder("AAA")); + assertEquals(3, myCaptureQueriesListener.getSelectQueries().size()); + + } + + private void createHierarchicalVs() { + myLocalVs = new ValueSet(); + 
myLocalVs.setUrl(URL_MY_VALUE_SET); + myLocalVs + .getCompose() + .addInclude() + .setSystem(URL_MY_CODE_SYSTEM) + .addFilter() + .setProperty("concept") + .setOp(FilterOperator.ISA) + .setValue("A"); + myLocalVs + .getCompose() + .addInclude() + .setSystem(URL_MY_CODE_SYSTEM) + .addConcept() + .setCode("A"); + } + + public List toDirectCodes(List theContains) { + List collect = theContains.stream().map(t -> t.getCode()).collect(Collectors.toList()); + ourLog.info("Codes: {}", collect); + return collect; + } + private void testValidateCodeOperationByCodeAndSystemInstanceOnInstance() throws IOException { String url = ourServerBase + "/ValueSet/" + myLocalValueSetId.getIdPart() + "/$validate-code?system=" + diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java index 16d3dc3d930..c2eeddb15f0 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java @@ -42,7 +42,7 @@ public class ValueSetConceptAccumulatorTest { @Test public void testIncludeConcept() { for (int i = 0; i < 1000; i++) { - myAccumulator.includeConcept("sys", "code", "display"); + myAccumulator.includeConcept("sys", "code", "display", null, null); } verify(myValueSetConceptDao, times(1000)).save(any()); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java index 23f79370f69..7680e88baec 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java @@ -195,7 +195,7 @@ public class 
ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest { include.setSystem(CS_URL); myTermSvc.expandValueSet(null, vs, myValueSetCodeAccumulator); - verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection()); + verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection(), nullable(Long.class), nullable(String.class)); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java index 58634856eb6..f7ae6f80a2e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java @@ -7,7 +7,6 @@ import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; -import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.util.JpaConstants; @@ -38,11 +37,10 @@ import java.io.IOException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -228,7 +226,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { // ValueSet by ID { myCaptureQueriesListener.clear(); - 
ValueSet expandedValueSet = myValueSetDao.expand(vsId, "display value 100", 0, 1000, mySrd); + ValueSetExpansionOptions options = ValueSetExpansionOptions.forOffsetAndCount(0, 1000).setFilter("display value 100"); + ValueSet expandedValueSet = myValueSetDao.expand(vsId, options, mySrd); List codes = expandedValueSet.getExpansion().getContains().stream().map(t -> t.getCode()).collect(Collectors.toList()); assertThat(codes.toString(), codes, containsInAnyOrder("code100", "code1000", "code1001", "code1002", "code1003", "code1004")); @@ -300,7 +299,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ValueSet expandedValueSet = myTermSvc.expandValueSet(null, input); ourLog.debug("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains( "code99" )); + assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains("code99")); // Make sure we used the pre-expanded version List selectQueries = myCaptureQueriesListener.getSelectQueries(); @@ -786,7 +785,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size()); - assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1,expandedConcepts.size())))); + assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1, expandedConcepts.size())))); } @Test @@ -818,7 +817,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), 
expandedValueSet.getExpansion().getContains().size()); - assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1,expandedConcepts.size())))); + assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1, expandedConcepts.size())))); } @Test @@ -953,7 +952,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { include.setSystem(CS_URL); myTermSvc.expandValueSet(null, vs, myValueSetCodeAccumulator); - verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection()); + verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection(), nullable(Long.class), nullable(String.class)); } @Test @@ -1433,5 +1432,4 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { } - } diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index 781f0e34231..b4436c319ec 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml index d7df70c1bea..440cd4e30e4 100644 --- a/hapi-fhir-jpaserver-cql/pom.xml +++ b/hapi-fhir-jpaserver-cql/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -144,13 +144,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 855dfe0501a..236fc33d3ab 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 
5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -55,13 +55,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml index 767f5422571..ab2347ac99c 100644 --- a/hapi-fhir-jpaserver-migrate/pom.xml +++ b/hapi-fhir-jpaserver-migrate/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index de5042f9a43..4e14b8594d7 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -87,6 +87,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { version.onTable("HFJ_FORCED_ID").addIndex("20210309.2", "IDX_FORCEID_FID") .unique(false).withColumns("FORCED_ID"); + //-- ValueSet Concept Fulltext Indexing + version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.1", "INDEX_STATUS").nullable().type(ColumnTypeEnum.LONG); + version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB); + version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.3", "SOURCE_PID").nullable().type(ColumnTypeEnum.LONG); } private void init530() { diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index fc6b066de31..8c915ff27c6 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 
5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index 3cf09bb4efc..feb1fbf5bd2 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -218,6 +218,11 @@ public class JpaConstants { */ public static final String DEFAULT_PARTITION_NAME = "DEFAULT"; + /** + * Parameter for the $expand operation + */ + public static final String OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY = "includeHierarchy"; + /** * Non-instantiable */ diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 23fa53e26a7..0cccf93a45b 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index e1878aacf98..633843691b3 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 7f20b4b2dc5..e00e23537da 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 9314f0dbd85..ffbe7a1164c 100644 --- 
a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml @@ -164,7 +164,7 @@ ca.uhn.hapi.fhir hapi-fhir-converter - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 71c5ec9049a..6f7664a8f48 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index bdf8b874600..2524d2960e8 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 6a615025f8d..58483804fd3 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index cea3284ea07..bb0460f5f2c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 729b752242d..ec97e7f6b0f 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index c70d6428ecc..b20cdc12fac 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 01709779112..1643433e6ef 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 81895a5dc6c..04d4fe39150 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ 
ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 5798cb0a6f5..b3490ba702b 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index da1cff255bb..d80fff6935b 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 108cf5a0825..412d69d642c 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index f37268d1445..24f6bfd53d9 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index cffad70ec0e..035dabec631 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index fe444b0a1c9..67dd6dfeb59 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 72fd8b4b0ab..8760ce684e3 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index bbb540c8ded..654c33c28fe 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 305b835d2d1..1dfb066f20b 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index bef3d99f342..6aae3187f80 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index 0e98d4c8ead..ecdbbd4a635 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 77b38d6c862..560106316d0 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ 
b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 222c7a6db50..68ae7002bdd 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index c3885abf4c3..9ac87f4b79d 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 1bbb77e484d..b180c3de905 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index d21005fb478..5e946ea683b 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml @@ -58,37 +58,37 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu3 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-hl7org-dstu2 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r4 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r5 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu2 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir 
hapi-fhir-validation-resources-dstu3 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-r4 - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT org.apache.velocity diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 9497e5e2aea..b4a180ec8ca 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 8dfad0cf1c8..36b38d441db 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. https://hapifhir.io diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml index 051b36516f5..37c4a874033 100644 --- a/restful-server-example/pom.xml +++ b/restful-server-example/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../pom.xml diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index c6e0d2c9d18..4e51e91c572 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index b399e7eae87..20a96ef3c9e 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index bb5a3413ee0..897bcbb0b07 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ 
b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE4-SNAPSHOT + 5.4.0-PRE5-SNAPSHOT ../../pom.xml From 7cf061d76b6801243df07eebf5c6d9d878182927 Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Tue, 6 Apr 2021 21:15:11 -0400 Subject: [PATCH 25/61] Jetty version bump --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 36b38d441db..d53c158c6e0 100644 --- a/pom.xml +++ b/pom.xml @@ -782,7 +782,7 @@ 3.16.0 3.0.0 - 9.4.38.v20210224 + 9.4.39.v20210325 3.0.2 5.7.0 6.5.4 From 462d9bc6c452789b5b6d71cfe0e7cfd802013f3b Mon Sep 17 00:00:00 2001 From: James Agnew Date: Thu, 8 Apr 2021 05:59:05 -0400 Subject: [PATCH 26/61] Fix compartment defs for R5 (#2528) * Fix compartment defs for R5 * Add changelog * Test fix --- .../context/RuntimeResourceDefinition.java | 5 + .../5_4_0/2528-fix-r5-compartment-defs.yaml | 5 + .../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 98 ++++++----- .../r5/AuthorizationInterceptorJpaR5Test.java | 163 ++++++++++++++++++ 4 files changed, 231 insertions(+), 40 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2528-fix-r5-compartment-defs.yaml create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/AuthorizationInterceptorJpaR5Test.java diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java index e9cab8f455b..de20094444f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java @@ -187,6 +187,11 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini for (RuntimeSearchParam next : searchParams) { if (next.getProvidesMembershipInCompartments() != null) { for (String nextCompartment : 
next.getProvidesMembershipInCompartments()) { + + if (nextCompartment.startsWith("Base FHIR compartment definition for ")) { + nextCompartment = nextCompartment.substring("Base FHIR compartment definition for ".length()); + } + if (!compartmentNameToSearchParams.containsKey(nextCompartment)) { compartmentNameToSearchParams.put(nextCompartment, new ArrayList<>()); } diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2528-fix-r5-compartment-defs.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2528-fix-r5-compartment-defs.yaml new file mode 100644 index 00000000000..a900b658f38 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2528-fix-r5-compartment-defs.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 2528 +title: "An issue with compartment definitions in R5 models was fixed. This issue caused some authorization + rules to reject valid requests. Thanks to Patrick Palacin for reporting!" diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 9f1a8677c14..8d40288bc2b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -49,7 +49,6 @@ import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.provider.r5.JpaSystemProviderR5; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; @@ -61,7 +60,6 @@ import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import 
ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5; @@ -73,6 +71,7 @@ import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.server.BasePagingProvider; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; +import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; @@ -81,6 +80,7 @@ import org.hibernate.search.mapper.orm.Search; import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r5.model.AllergyIntolerance; import org.hl7.fhir.r5.model.Appointment; import org.hl7.fhir.r5.model.AuditEvent; @@ -164,10 +164,9 @@ import static org.mockito.Mockito.mock; @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = {TestR5Config.class}) -public abstract class BaseJpaR5Test extends BaseJpaTest { +public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder { private static IValidationSupport ourJpaValidationSupportChainR5; private static IFhirResourceDaoValueSet ourValueSetDao; - @Autowired protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc; @Autowired @@ -416,6 +415,23 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { @Autowired private IBulkDataExportSvc myBulkDataExportSvc; + @Override + public IIdType doCreateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return 
dao.create(theResource, mySrd).getId().toUnqualifiedVersionless(); + } + + @Override + public IIdType doUpdateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return dao.update(theResource, mySrd).getId().toUnqualifiedVersionless(); + } + + @Override + public FhirContext getFhirContext() { + return myFhirCtx; + } + @AfterEach() public void afterCleanupDao() { myDaoConfig.setExpireSearchResults(new DaoConfig().isExpireSearchResults()); @@ -463,7 +479,7 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - SearchSession searchSession = Search.session(myEntityManager); + SearchSession searchSession = Search.session(myEntityManager); searchSession.workspace(ResourceTable.class).purge(); // searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); searchSession.indexingPlan().execute(); @@ -524,6 +540,43 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { dao.update(resourceParsed); } + protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) { + List contains = theValueSet.getExpansion().getContains(); + + Stream stream = contains.stream(); + if (theSystem != null) { + stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); + } + if (theCode != null) { + stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); + } + if (theDisplay != null) { + stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); + } + if (theDesignationCount != null) { + stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Expanded ValueSet %s did not contain concept 
[%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount); + fail(failureMessage); + return null; + } else { + return first.get(); + } + } + + public List getExpandedConceptsByValueSetUrl(String theValuesetUrl) { + return runInTransaction(() -> { + List valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl); + assertEquals(1, valueSets.size()); + TermValueSet valueSet = valueSets.get(0); + List concepts = valueSet.getConcepts(); + return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList()); + }); + } + @AfterAll public static void afterClassClearContextBaseJpaR5Test() { ourValueSetDao.purgeCaches(); @@ -634,40 +687,5 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { String[] uuidParams = params.get(Constants.PARAM_PAGINGACTION); return uuidParams[0]; } - protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) { - List contains = theValueSet.getExpansion().getContains(); - - Stream stream = contains.stream(); - if (theSystem != null) { - stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); - } - if (theCode != null ) { - stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); - } - if (theDisplay != null){ - stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); - } - if (theDesignationCount != null) { - stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount); - } - - Optional first = stream.findFirst(); - if (!first.isPresent()) { - String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount); - fail(failureMessage); - return null; - } else { - return 
first.get(); - } - } - public List getExpandedConceptsByValueSetUrl(String theValuesetUrl) { - return runInTransaction(() -> { - List valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl); - assertEquals(1, valueSets.size()); - TermValueSet valueSet = valueSets.get(0); - List concepts = valueSet.getConcepts(); - return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList()); - }); - } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/AuthorizationInterceptorJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/AuthorizationInterceptorJpaR5Test.java new file mode 100644 index 00000000000..e6ca3e66e27 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/AuthorizationInterceptorJpaR5Test.java @@ -0,0 +1,163 @@ +package ca.uhn.fhir.jpa.provider.r5; + +import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; +import ca.uhn.fhir.jpa.provider.r5.BaseResourceProviderR5Test; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.MethodOutcome; +import ca.uhn.fhir.rest.api.RestOperationTypeEnum; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor; +import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; +import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; +import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; +import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor; +import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule; +import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRuleTester; +import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum; +import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import ca.uhn.fhir.util.UrlUtil; +import 
org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Bundle; +import org.hl7.fhir.r5.model.CodeableConcept; +import org.hl7.fhir.r5.model.Coding; +import org.hl7.fhir.r5.model.Condition; +import org.hl7.fhir.r5.model.Encounter; +import org.hl7.fhir.r5.model.Enumerations; +import org.hl7.fhir.r5.model.IdType; +import org.hl7.fhir.r5.model.Identifier; +import org.hl7.fhir.r5.model.Observation; +import org.hl7.fhir.r5.model.Organization; +import org.hl7.fhir.r5.model.Parameters; +import org.hl7.fhir.r5.model.Patient; +import org.hl7.fhir.r5.model.Practitioner; +import org.hl7.fhir.r5.model.Reference; +import org.hl7.fhir.r5.model.StringType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; + +public class AuthorizationInterceptorJpaR5Test extends BaseResourceProviderR5Test { + + private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptorJpaR5Test.class); + + @BeforeEach + @Override + public void before() throws Exception { + super.before(); + myDaoConfig.setAllowMultipleDelete(true); + 
myDaoConfig.setExpungeEnabled(true); + myDaoConfig.setDeleteExpungeEnabled(true); + } + + /** + * See #503 + */ + @Test + public void testDeleteIsAllowedForCompartment() { + + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100"); + patient.addName().setFamily("Tester").addGiven("Raghad"); + final IIdType id = myClient.create().resource(patient).execute().getId(); + + Observation obsInCompartment = new Observation(); + obsInCompartment.setStatus(Enumerations.ObservationStatus.FINAL); + obsInCompartment.getSubject().setReferenceElement(id.toUnqualifiedVersionless()); + IIdType obsInCompartmentId = myClient.create().resource(obsInCompartment).execute().getId().toUnqualifiedVersionless(); + + Observation obsNotInCompartment = new Observation(); + obsNotInCompartment.setStatus(Enumerations.ObservationStatus.FINAL); + IIdType obsNotInCompartmentId = myClient.create().resource(obsNotInCompartment).execute().getId().toUnqualifiedVersionless(); + + ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { + return new RuleBuilder() + .allow().delete().resourcesOfType(Observation.class).inCompartment("Patient", id).andThen() + .deny().delete().allResources().withAnyId().andThen() + .allowAll() + .build(); + } + }); + + myClient.delete().resourceById(obsInCompartmentId.toUnqualifiedVersionless()).execute(); + + try { + myClient.delete().resourceById(obsNotInCompartmentId.toUnqualifiedVersionless()).execute(); + fail(); + } catch (ForbiddenOperationException e) { + // good + } + } + + @Test + public void testDeleteIsAllowedForCompartmentUsingTransaction() { + + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100"); + patient.addName().setFamily("Tester").addGiven("Raghad"); + final IIdType id = myClient.create().resource(patient).execute().getId(); + + Observation 
obsInCompartment = new Observation(); + obsInCompartment.setStatus(Enumerations.ObservationStatus.FINAL); + obsInCompartment.getSubject().setReferenceElement(id.toUnqualifiedVersionless()); + IIdType obsInCompartmentId = myClient.create().resource(obsInCompartment).execute().getId().toUnqualifiedVersionless(); + + Observation obsNotInCompartment = new Observation(); + obsNotInCompartment.setStatus(Enumerations.ObservationStatus.FINAL); + IIdType obsNotInCompartmentId = myClient.create().resource(obsNotInCompartment).execute().getId().toUnqualifiedVersionless(); + + ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { + return new RuleBuilder() + .allow().delete().resourcesOfType(Observation.class).inCompartment("Patient", id).andThen() + .allow().transaction().withAnyOperation().andApplyNormalRules().andThen() + .denyAll() + .build(); + } + }); + + Bundle bundle; + + bundle = new Bundle(); + bundle.setType(Bundle.BundleType.TRANSACTION); + bundle.addEntry().getRequest().setMethod(Bundle.HTTPVerb.DELETE).setUrl(obsInCompartmentId.toUnqualifiedVersionless().getValue()); + myClient.transaction().withBundle(bundle).execute(); + + try { + bundle = new Bundle(); + bundle.setType(Bundle.BundleType.TRANSACTION); + bundle.addEntry().getRequest().setMethod(Bundle.HTTPVerb.DELETE).setUrl(obsNotInCompartmentId.toUnqualifiedVersionless().getValue()); + myClient.transaction().withBundle(bundle).execute(); + fail(); + } catch (ForbiddenOperationException e) { + // good + } + } + + +} From f70648484a59a79855922362873d31017499ec39 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Thu, 8 Apr 2021 12:28:42 -0400 Subject: [PATCH 27/61] Avoid a deadlock uploading terminology in postgres (#2529) * Fix #2493 * Try to fix intermittent test failure --- ...-avoid-deadlock-uploading-terminology.yaml | 5 +++ .../jpa/term/TermDeferredStorageSvcImpl.java | 6 ++- 
.../method/ConformanceMethodBindingTest.java | 41 +++++++++++++------ 3 files changed, 37 insertions(+), 15 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2493-avoid-deadlock-uploading-terminology.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2493-avoid-deadlock-uploading-terminology.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2493-avoid-deadlock-uploading-terminology.yaml new file mode 100644 index 00000000000..1672b9c204e --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2493-avoid-deadlock-uploading-terminology.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 2493 +title: "A database deadlock in Postgresql was observed when uploading large terminology CodeSystems using + deferred uploading. Thanks to Tyge Folke Nielsen for reporting and suggesting a fix!" diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java index 1b9f9305109..6e36e2fb331 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java @@ -60,7 +60,9 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.Queue; import java.util.UUID; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; @@ -68,7 +70,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class); final private List myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>()); - final private List 
myDeferredCodeSystemVersionsDeletions = Collections.synchronizedList(new ArrayList<>()); + final private Queue myDeferredCodeSystemVersionsDeletions = new ConcurrentLinkedQueue<>(); final private List myDeferredConcepts = Collections.synchronizedList(new ArrayList<>()); final private List myDeferredValueSets = Collections.synchronizedList(new ArrayList<>()); final private List myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>()); @@ -498,7 +500,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { } @Override - public synchronized void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) { + public void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) { myDeferredCodeSystemVersionsDeletions.add(theCodeSystemVersion); } diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBindingTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBindingTest.java index 2b3d8d09938..af3c956d1d8 100644 --- a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBindingTest.java +++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBindingTest.java @@ -1,35 +1,50 @@ package ca.uhn.fhir.rest.server.method; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.rest.annotation.Metadata; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.IRestfulServer; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import com.google.common.collect.Lists; import org.hl7.fhir.instance.model.api.IBaseConformance; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import 
javax.servlet.http.HttpServletRequest; import java.lang.reflect.Method; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +@ExtendWith(MockitoExtension.class) public class ConformanceMethodBindingTest { - private FhirContext fhirContext; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + protected ServletRequestDetails mySrd; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private FhirContext myFhirContext; private ConformanceMethodBinding conformanceMethodBinding; @BeforeEach public void setUp() { - fhirContext = mock(FhirContext.class); } private T init(T theCapabilityStatementProvider) throws NoSuchMethodException { T provider = spy(theCapabilityStatementProvider); Method method = provider.getClass().getDeclaredMethod("getServerConformance", HttpServletRequest.class, RequestDetails.class); - conformanceMethodBinding = new ConformanceMethodBinding(method, fhirContext, provider); + conformanceMethodBinding = new ConformanceMethodBinding(method, myFhirContext, provider); return provider; } @@ -37,9 +52,9 @@ public class ConformanceMethodBindingTest { public void invokeServerCached() throws NoSuchMethodException { TestResourceProvider provider = init(new TestResourceProvider()); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); - 
conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); } @@ -47,12 +62,12 @@ public class ConformanceMethodBindingTest { public void invokeServerCacheExpires() throws NoSuchMethodException { TestResourceProviderSmallCache provider = init(new TestResourceProviderSmallCache()); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); sleepAtLeast(20); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, timeout(10000).times(2)).getServerConformance(any(), any()); } @@ -61,10 +76,10 @@ public class ConformanceMethodBindingTest { public void invokeServerCacheDisabled() throws NoSuchMethodException { TestResourceProviderNoCache provider = init(new TestResourceProviderNoCache()); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), 
mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(2)).getServerConformance(any(), any()); } @@ -72,11 +87,11 @@ public class ConformanceMethodBindingTest { public void invokeServerCacheDisabledInSuperclass() throws NoSuchMethodException { TestResourceProviderNoCache2 provider = init(new TestResourceProviderNoCache2()); - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); // We currently don't scan the annotation on the superclass...Perhaps we should - conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mock(RequestDetails.class, RETURNS_DEEP_STUBS), new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); + conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), mySrd, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); } @@ -84,7 +99,7 @@ public class ConformanceMethodBindingTest { public void 
invokeServerNotCached_ClientControlled() throws NoSuchMethodException { TestResourceProvider provider = init(new TestResourceProvider()); - RequestDetails requestDetails = mock(RequestDetails.class, RETURNS_DEEP_STUBS); + RequestDetails requestDetails = mySrd; when(requestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)).thenReturn(Lists.newArrayList(Constants.CACHE_CONTROL_NO_CACHE)); conformanceMethodBinding.invokeServer(mock(IRestfulServer.class, RETURNS_DEEP_STUBS), requestDetails, new Object[]{mock(HttpServletRequest.class), mock(RequestDetails.class)}); verify(provider, times(1)).getServerConformance(any(), any()); From edfe1ae8e6ff65bbc79918d847c17f006cf21389 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Thu, 8 Apr 2021 13:23:21 -0400 Subject: [PATCH 28/61] Add lastupdated to capabilitystatement (#2530) * Add lastupdated to capabilitystatement * Test fix * Test fix --- ...rCapabilityStatementProviderJpaR4Test.java | 17 +- .../ServerCapabilityStatementProvider.java | 1258 ++++----- ...rverCapabilityStatementProviderR4Test.java | 2338 +++++++++-------- 3 files changed, 1843 insertions(+), 1770 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerCapabilityStatementProviderJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerCapabilityStatementProviderJpaR4Test.java index 6c73dff4c1d..9f4d6945e5c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerCapabilityStatementProviderJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerCapabilityStatementProviderJpaR4Test.java @@ -65,6 +65,19 @@ public class ServerCapabilityStatementProviderJpaR4Test extends BaseResourceProv } + @Test + public void testLastUpdatedIncluded() { + CapabilityStatement cs = myClient.capabilities().ofType(CapabilityStatement.class).execute(); + + List fooSearchParams = findSearchParams(cs, "Patient", "_lastUpdated"); + 
assertEquals(1, fooSearchParams.size()); + assertEquals("_lastUpdated", fooSearchParams.get(0).getName()); + assertEquals("http://localhost:" + ourPort + "/fhir/context/SearchParameter/Patient-_lastUpdated", fooSearchParams.get(0).getDefinition()); + assertEquals("Only return resources which were last updated as specified by the given range", fooSearchParams.get(0).getDocumentation()); + assertEquals(Enumerations.SearchParamType.DATE, fooSearchParams.get(0).getType()); + + } + @Override @AfterEach public void after() throws Exception { @@ -119,10 +132,10 @@ public class ServerCapabilityStatementProviderJpaR4Test extends BaseResourceProv .execute(); List includes = findIncludes(cs, "Patient"); - assertThat(includes.toString(), includes, contains("*", "Patient:general-practitioner", "Patient:link", "Patient:organization")); + assertThat(includes.toString(), includes, containsInAnyOrder("*", "Patient:general-practitioner", "Patient:link", "Patient:organization")); includes = findIncludes(cs, "Observation"); - assertThat(includes.toString(), includes, contains("*", "Observation:based-on", "Observation:derived-from", "Observation:device", "Observation:encounter", "Observation:focus", "Observation:foo", "Observation:has-member", "Observation:part-of", "Observation:patient", "Observation:performer", "Observation:specimen", "Observation:subject")); + assertThat(includes.toString(), includes, containsInAnyOrder("*", "Observation:based-on", "Observation:derived-from", "Observation:device", "Observation:encounter", "Observation:focus", "Observation:foo", "Observation:has-member", "Observation:part-of", "Observation:patient", "Observation:performer", "Observation:specimen", "Observation:subject")); List revIncludes = findRevIncludes(cs, "Patient"); assertThat(revIncludes.toString(), revIncludes, hasItems( diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java index 59913098b86..230763bc8a6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java @@ -40,6 +40,7 @@ import org.slf4j.LoggerFactory; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -82,624 +83,641 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; */ public class ServerCapabilityStatementProvider implements IServerConformanceProvider { - public static final boolean DEFAULT_REST_RESOURCE_REV_INCLUDES_ENABLED = true; - private static final Logger ourLog = LoggerFactory.getLogger(ServerCapabilityStatementProvider.class); - private final FhirContext myContext; - private final RestfulServer myServer; - private final ISearchParamRegistry mySearchParamRegistry; - private final RestfulServerConfiguration myServerConfiguration; - private final IValidationSupport myValidationSupport; - private String myPublisher = "Not provided"; - private boolean myRestResourceRevIncludesEnabled = DEFAULT_REST_RESOURCE_REV_INCLUDES_ENABLED; - - /** - * Constructor - */ - public ServerCapabilityStatementProvider(RestfulServer theServer) { - myServer = theServer; - myContext = theServer.getFhirContext(); - mySearchParamRegistry = null; - myServerConfiguration = null; - myValidationSupport = null; - } - - /** - * Constructor - */ - public ServerCapabilityStatementProvider(FhirContext theContext, RestfulServerConfiguration theServerConfiguration) { - myContext = theContext; - myServerConfiguration = theServerConfiguration; - mySearchParamRegistry = null; - myServer = null; - myValidationSupport = null; - } - - /** - * Constructor - */ - public 
ServerCapabilityStatementProvider(RestfulServer theRestfulServer, ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { - myContext = theRestfulServer.getFhirContext(); - mySearchParamRegistry = theSearchParamRegistry; - myServer = theRestfulServer; - myServerConfiguration = null; - myValidationSupport = theValidationSupport; - } - - private void checkBindingForSystemOps(FhirTerser theTerser, IBase theRest, Set theSystemOps, BaseMethodBinding theMethodBinding) { - RestOperationTypeEnum restOperationType = theMethodBinding.getRestOperationType(); - if (restOperationType.isSystemLevel()) { - String sysOp = restOperationType.getCode(); - if (theSystemOps.contains(sysOp) == false) { - theSystemOps.add(sysOp); - IBase interaction = theTerser.addElement(theRest, "interaction"); - theTerser.addElement(interaction, "code", sysOp); - } - } - } - - - private String conformanceDate(RestfulServerConfiguration theServerConfiguration) { - IPrimitiveType buildDate = theServerConfiguration.getConformanceDate(); - if (buildDate != null && buildDate.getValue() != null) { - try { - return buildDate.getValueAsString(); - } catch (DataFormatException e) { - // fall through - } - } - return InstantDt.withCurrentTime().getValueAsString(); - } - - private RestfulServerConfiguration getServerConfiguration() { - if (myServer != null) { - return myServer.createConfiguration(); - } - return myServerConfiguration; - } - - - /** - * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The - * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. - */ - public String getPublisher() { - return myPublisher; - } - - /** - * Sets the value of the "publisher" that will be placed in the generated conformance statement. 
As this is a mandatory element, the value should not be null (although this is not enforced). The - * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. - */ - public void setPublisher(String thePublisher) { - myPublisher = thePublisher; - } - - @Override - @Metadata - public IBaseConformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { - - HttpServletRequest servletRequest = null; - if (theRequestDetails instanceof ServletRequestDetails) { - servletRequest = ((ServletRequestDetails) theRequestDetails).getServletRequest(); - } - - RestfulServerConfiguration configuration = getServerConfiguration(); - Bindings bindings = configuration.provideBindings(); - - IBaseConformance retVal = (IBaseConformance) myContext.getResourceDefinition("CapabilityStatement").newInstance(); - - FhirTerser terser = myContext.newTerser(); - - TreeMultimap resourceTypeToSupportedProfiles = getSupportedProfileMultimap(terser); - - terser.addElement(retVal, "id", UUID.randomUUID().toString()); - terser.addElement(retVal, "name", "RestServer"); - terser.addElement(retVal, "publisher", myPublisher); - terser.addElement(retVal, "date", conformanceDate(configuration)); - terser.addElement(retVal, "fhirVersion", myContext.getVersion().getVersion().getFhirVersionString()); - - ServletContext servletContext = (ServletContext) (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); - String serverBase = configuration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); - terser.addElement(retVal, "implementation.url", serverBase); - terser.addElement(retVal, "implementation.description", configuration.getImplementationDescription()); - terser.addElement(retVal, "kind", "instance"); - terser.addElement(retVal, "software.name", configuration.getServerName()); - terser.addElement(retVal, "software.version", configuration.getServerVersion()); - if (myContext.isFormatXmlSupported()) { - terser.addElement(retVal, "format", Constants.CT_FHIR_XML_NEW); - terser.addElement(retVal, "format", Constants.FORMAT_XML); - } - if (myContext.isFormatJsonSupported()) { - terser.addElement(retVal, "format", Constants.CT_FHIR_JSON_NEW); - terser.addElement(retVal, "format", Constants.FORMAT_JSON); - } - if (myContext.isFormatRdfSupported()) { - terser.addElement(retVal, "format", Constants.CT_RDF_TURTLE); - terser.addElement(retVal, "format", Constants.FORMAT_TURTLE); - } - terser.addElement(retVal, "status", "active"); - - IBase rest = terser.addElement(retVal, "rest"); - terser.addElement(rest, "mode", "server"); - - Set systemOps = new HashSet<>(); - Set operationNames = new HashSet<>(); - - Map>> resourceToMethods = configuration.collectMethodBindings(); - Map> resourceNameToSharedSupertype = configuration.getNameToSharedSupertype(); - - TreeMultimap resourceNameToIncludes = TreeMultimap.create(); - TreeMultimap resourceNameToRevIncludes = TreeMultimap.create(); - for (Entry>> nextEntry : resourceToMethods.entrySet()) { - String resourceName = nextEntry.getKey(); - for (BaseMethodBinding nextMethod : nextEntry.getValue()) { - if (nextMethod instanceof SearchMethodBinding) { - resourceNameToIncludes.putAll(resourceName, nextMethod.getIncludes()); - resourceNameToRevIncludes.putAll(resourceName, nextMethod.getRevIncludes()); - } - } - - } - - for (Entry>> nextEntry 
: resourceToMethods.entrySet()) { - - String resourceName = nextEntry.getKey(); - if (nextEntry.getKey().isEmpty() == false) { - Set resourceOps = new HashSet<>(); - IBase resource = terser.addElement(rest, "resource"); - - postProcessRestResource(terser, resource, resourceName); - - RuntimeResourceDefinition def; - FhirContext context = configuration.getFhirContext(); - if (resourceNameToSharedSupertype.containsKey(resourceName)) { - def = context.getResourceDefinition(resourceNameToSharedSupertype.get(resourceName)); - } else { - def = context.getResourceDefinition(resourceName); - } - terser.addElement(resource, "type", def.getName()); - terser.addElement(resource, "profile", def.getResourceProfile(serverBase)); - - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - RestOperationTypeEnum resOpCode = nextMethodBinding.getRestOperationType(); - if (resOpCode.isTypeLevel() || resOpCode.isInstanceLevel()) { - String resOp; - resOp = resOpCode.getCode(); - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - IBase interaction = terser.addElement(resource, "interaction"); - terser.addElement(interaction, "code", resOp); - } - if (RestOperationTypeEnum.VREAD.equals(resOpCode)) { - // vread implies read - resOp = "read"; - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - IBase interaction = terser.addElement(resource, "interaction"); - terser.addElement(interaction, "code", resOp); - } - } - } - - if (nextMethodBinding.isSupportsConditional()) { - switch (resOpCode) { - case CREATE: - terser.setElement(resource, "conditionalCreate", "true"); - break; - case DELETE: - if (nextMethodBinding.isSupportsConditionalMultiple()) { - terser.setElement(resource, "conditionalDelete", "multiple"); - } else { - terser.setElement(resource, "conditionalDelete", "single"); - } - break; - case UPDATE: - terser.setElement(resource, "conditionalUpdate", "true"); - break; - case HISTORY_INSTANCE: - case HISTORY_SYSTEM: - case 
HISTORY_TYPE: - case READ: - case SEARCH_SYSTEM: - case SEARCH_TYPE: - case TRANSACTION: - case VALIDATE: - case VREAD: - case METADATA: - case META_ADD: - case META: - case META_DELETE: - case PATCH: - case BATCH: - case ADD_TAGS: - case DELETE_TAGS: - case GET_TAGS: - case GET_PAGE: - case GRAPHQL_REQUEST: - case EXTENDED_OPERATION_SERVER: - case EXTENDED_OPERATION_TYPE: - case EXTENDED_OPERATION_INSTANCE: - default: - break; - } - } - - checkBindingForSystemOps(terser, rest, systemOps, nextMethodBinding); - - if (nextMethodBinding instanceof SearchMethodBinding) { - SearchMethodBinding methodBinding = (SearchMethodBinding) nextMethodBinding; - if (methodBinding.getQueryName() != null) { - String queryName = bindings.getNamedSearchMethodBindingToName().get(methodBinding); - if (operationNames.add(queryName)) { - IBase operation = terser.addElement(rest, "operation"); - terser.addElement(operation, "name", methodBinding.getQueryName()); - terser.addElement(operation, "definition", (getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + queryName)); - } - } - } else if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToName().get(methodBinding); - // Only add each operation (by name) once - if (operationNames.add(opName)) { - IBase operation = terser.addElement(rest, "operation"); - terser.addElement(operation, "name", methodBinding.getName().substring(1)); - terser.addElement(operation, "definition", getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + opName); - } - } - - } - - ISearchParamRegistry searchParamRegistry; - if (mySearchParamRegistry != null) { - searchParamRegistry = mySearchParamRegistry; - } else if (myServerConfiguration != null) { - searchParamRegistry = myServerConfiguration; - } else { - searchParamRegistry = myServer.createConfiguration(); - } - - Map searchParams = 
searchParamRegistry.getActiveSearchParams(resourceName); - for (RuntimeSearchParam next : searchParams.values()) { - IBase searchParam = terser.addElement(resource, "searchParam"); - terser.addElement(searchParam, "name", next.getName()); - terser.addElement(searchParam, "type", next.getParamType().getCode()); - if (isNotBlank(next.getDescription())) { - terser.addElement(searchParam, "documentation", next.getDescription()); - } - - String spUri = next.getUri(); - if (isBlank(spUri) && servletRequest != null) { - String id; - if (next.getId() != null) { - id = next.getId().toUnqualifiedVersionless().getValue(); - } else { - id = resourceName + "-" + next.getName(); - } - spUri = configuration.getServerAddressStrategy().determineServerBase(servletRequest.getServletContext(), servletRequest) + "/" + id; - } - if (isNotBlank(spUri)) { - terser.addElement(searchParam, "definition", spUri); - } - } - - // Add Include to CapabilityStatement.rest.resource - NavigableSet resourceIncludes = resourceNameToIncludes.get(resourceName); - if (resourceIncludes.isEmpty()) { - List includes = searchParams - .values() - .stream() - .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .map(t -> resourceName + ":" + t.getName()) - .sorted() - .collect(Collectors.toList()); - terser.addElement(resource, "searchInclude", "*"); - for (String nextInclude : includes) { - terser.addElement(resource, "searchInclude", nextInclude); - } - } else { - for (String resourceInclude : resourceIncludes) { - terser.addElement(resource, "searchInclude", resourceInclude); - } - } - - // Add RevInclude to CapabilityStatement.rest.resource - if (myRestResourceRevIncludesEnabled) { - NavigableSet resourceRevIncludes = resourceNameToRevIncludes.get(resourceName); - if (resourceRevIncludes.isEmpty()) { - TreeSet revIncludes = new TreeSet<>(); - for (String nextResourceName : resourceToMethods.keySet()) { - if (isBlank(nextResourceName)) { - continue; - } - - for (RuntimeSearchParam t : 
searchParamRegistry - .getActiveSearchParams(nextResourceName) - .values()) { - if (t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { - if (isNotBlank(t.getName())) { - boolean appropriateTarget = false; - if (t.getTargets().contains(resourceName) || t.getTargets().isEmpty()) { - appropriateTarget = true; - } - - if (appropriateTarget) { - revIncludes.add(nextResourceName + ":" + t.getName()); - } - } - } - } - } - for (String nextInclude : revIncludes) { - terser.addElement(resource, "searchRevInclude", nextInclude); - } - } else { - for (String resourceInclude : resourceRevIncludes) { - terser.addElement(resource, "searchRevInclude", resourceInclude); - } - } - } - - // Add SupportedProfile to CapabilityStatement.rest.resource - for (String supportedProfile : resourceTypeToSupportedProfiles.get(resourceName)) { - terser.addElement(resource, "supportedProfile", supportedProfile); - } - - } else { - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - checkBindingForSystemOps(terser, rest, systemOps, nextMethodBinding); - if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToName().get(methodBinding); - if (operationNames.add(opName)) { - ourLog.debug("Found bound operation: {}", opName); - IBase operation = terser.addElement(rest, "operation"); - terser.addElement(operation, "name", methodBinding.getName().substring(1)); - terser.addElement(operation, "definition", getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + opName); - } - } - } - } - - postProcessRest(terser, rest); - - } - - postProcess(terser, retVal); - - return retVal; - } - - private TreeMultimap getSupportedProfileMultimap(FhirTerser terser) { - TreeMultimap resourceTypeToSupportedProfiles = TreeMultimap.create(); - if (myValidationSupport != null) { - List allStructureDefinitions = 
myValidationSupport.fetchAllNonBaseStructureDefinitions(); - if (allStructureDefinitions != null) { - for (IBaseResource next : allStructureDefinitions) { - String kind = terser.getSinglePrimitiveValueOrNull(next, "kind"); - String url = terser.getSinglePrimitiveValueOrNull(next, "url"); - String baseDefinition = defaultString(terser.getSinglePrimitiveValueOrNull(next, "baseDefinition")); - if ("resource".equals(kind) && isNotBlank(url)) { - - // Don't include the base resource definitions in the supported profile list - This isn't helpful - if (baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/DomainResource") || baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/Resource")) { - continue; - } - - String resourceType = terser.getSinglePrimitiveValueOrNull(next, "snapshot.element.path"); - if (isBlank(resourceType)) { - resourceType = terser.getSinglePrimitiveValueOrNull(next, "differential.element.path"); - } - - if (isNotBlank(resourceType)) { - resourceTypeToSupportedProfiles.put(resourceType, url); - } - } - } - } - } - return resourceTypeToSupportedProfiles; - } - - /** - * Subclasses may override - */ - protected void postProcess(FhirTerser theTerser, IBaseConformance theCapabilityStatement) { - // nothing - } - - /** - * Subclasses may override - */ - protected void postProcessRest(FhirTerser theTerser, IBase theRest) { - // nothing - } - - /** - * Subclasses may override - */ - protected void postProcessRestResource(FhirTerser theTerser, IBase theResource, String theResourceName) { - // nothing - } - - protected String getOperationDefinitionPrefix(RequestDetails theRequestDetails) { - if (theRequestDetails == null) { - return ""; - } - return theRequestDetails.getServerBaseForRequest() + "/"; - } - - - @Read(typeName = "OperationDefinition") - public IBaseResource readOperationDefinition(@IdParam IIdType theId, RequestDetails theRequestDetails) { - if (theId == null || theId.hasIdPart() == false) { - throw new 
ResourceNotFoundException(theId); - } - RestfulServerConfiguration configuration = getServerConfiguration(); - Bindings bindings = configuration.provideBindings(); - - List operationBindings = bindings.getOperationNameToBindings().get(theId.getIdPart()); - if (operationBindings != null && !operationBindings.isEmpty()) { - return readOperationDefinitionForOperation(operationBindings); - } - List searchBindings = bindings.getSearchNameToBindings().get(theId.getIdPart()); - if (searchBindings != null && !searchBindings.isEmpty()) { - return readOperationDefinitionForNamedSearch(searchBindings); - } - throw new ResourceNotFoundException(theId); - } - - private IBaseResource readOperationDefinitionForNamedSearch(List bindings) { - IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); - FhirTerser terser = myContext.newTerser(); - - terser.addElement(op, "status", "active"); - terser.addElement(op, "kind", "query"); - terser.addElement(op, "affectsState", "false"); - - terser.addElement(op, "instance", "false"); - - Set inParams = new HashSet<>(); - - String operationCode = null; - for (SearchMethodBinding binding : bindings) { - if (isNotBlank(binding.getDescription())) { - terser.addElement(op, "description", binding.getDescription()); - } - if (isBlank(binding.getResourceProviderResourceName())) { - terser.addElement(op, "system", "true"); - terser.addElement(op, "type", "false"); - } else { - terser.addElement(op, "system", "false"); - terser.addElement(op, "type", "true"); - terser.addElement(op, "resource", binding.getResourceProviderResourceName()); - } - - if (operationCode == null) { - operationCode = binding.getQueryName(); - } - - for (IParameter nextParamUntyped : binding.getParameters()) { - if (nextParamUntyped instanceof SearchParameter) { - SearchParameter nextParam = (SearchParameter) nextParamUntyped; - if (!inParams.add(nextParam.getName())) { - continue; - } - - IBase param = terser.addElement(op, "parameter"); - 
terser.addElement(param, "use", "in"); - terser.addElement(param, "type", "string"); - terser.addElement(param, "searchType", nextParam.getParamType().getCode()); - terser.addElement(param, "min", nextParam.isRequired() ? "1" : "0"); - terser.addElement(param, "max", "1"); - terser.addElement(param, "name", nextParam.getName()); - } - } - - } - - terser.addElement(op, "code", operationCode); - terser.addElement(op, "name", "Search_" + operationCode); - - return op; - } - - private IBaseResource readOperationDefinitionForOperation(List bindings) { - IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); - FhirTerser terser = myContext.newTerser(); - - terser.addElement(op, "status", "active"); - terser.addElement(op, "kind", "operation"); - - boolean systemLevel = false; - boolean typeLevel = false; - boolean instanceLevel = false; - boolean affectsState = false; - String description = null; - String code = null; - String name; - - Set resourceNames = new TreeSet<>(); - Set inParams = new HashSet<>(); - Set outParams = new HashSet<>(); - - for (OperationMethodBinding sharedDescription : bindings) { - if (isNotBlank(sharedDescription.getDescription()) && isBlank(description)) { - description = sharedDescription.getDescription(); - } - if (sharedDescription.isCanOperateAtInstanceLevel()) { - instanceLevel = true; - } - if (sharedDescription.isCanOperateAtServerLevel()) { - systemLevel = true; - } - if (sharedDescription.isCanOperateAtTypeLevel()) { - typeLevel = true; - } - if (!sharedDescription.isIdempotent()) { - affectsState |= true; - } - - code = sharedDescription.getName().substring(1); - - if (isNotBlank(sharedDescription.getResourceName())) { - resourceNames.add(sharedDescription.getResourceName()); - } - - for (IParameter nextParamUntyped : sharedDescription.getParameters()) { - if (nextParamUntyped instanceof OperationParameter) { - OperationParameter nextParam = (OperationParameter) nextParamUntyped; - if 
(!inParams.add(nextParam.getName())) { - continue; - } - IBase param = terser.addElement(op, "parameter"); - terser.addElement(param, "use", "in"); - if (nextParam.getParamType() != null) { - terser.addElement(param, "type", nextParam.getParamType()); - } - if (nextParam.getSearchParamType() != null) { - terser.addElement(param, "searchType", nextParam.getSearchParamType()); - } - terser.addElement(param, "min", Integer.toString(nextParam.getMin())); - terser.addElement(param, "max", (nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()))); - terser.addElement(param, "name", nextParam.getName()); - } - } - - for (ReturnType nextParam : sharedDescription.getReturnParams()) { - if (!outParams.add(nextParam.getName())) { - continue; - } - IBase param = terser.addElement(op, "parameter"); - terser.addElement(param, "use", "out"); - if (nextParam.getType() != null) { - terser.addElement(param, "type", nextParam.getType()); - } - terser.addElement(param, "min", Integer.toString(nextParam.getMin())); - terser.addElement(param, "max", (nextParam.getMax() == -1 ? 
"*" : Integer.toString(nextParam.getMax()))); - terser.addElement(param, "name", nextParam.getName()); - } - } - - name = "Operation_" + code; - - terser.addElements(op, "resource", resourceNames); - terser.addElement(op, "name", name); - terser.addElement(op, "code", code); - terser.addElement(op, "description", description); - terser.addElement(op, "affectsState", Boolean.toString(affectsState)); - terser.addElement(op, "system", Boolean.toString(systemLevel)); - terser.addElement(op, "type", Boolean.toString(typeLevel)); - terser.addElement(op, "instance", Boolean.toString(instanceLevel)); - - return op; - } - - @Override - public void setRestfulServer(RestfulServer theRestfulServer) { - // ignore - } - - public void setRestResourceRevIncludesEnabled(boolean theRestResourceRevIncludesEnabled) { - myRestResourceRevIncludesEnabled = theRestResourceRevIncludesEnabled; - } + public static final boolean DEFAULT_REST_RESOURCE_REV_INCLUDES_ENABLED = true; + private static final Logger ourLog = LoggerFactory.getLogger(ServerCapabilityStatementProvider.class); + private final FhirContext myContext; + private final RestfulServer myServer; + private final ISearchParamRegistry mySearchParamRegistry; + private final RestfulServerConfiguration myServerConfiguration; + private final IValidationSupport myValidationSupport; + private String myPublisher = "Not provided"; + private boolean myRestResourceRevIncludesEnabled = DEFAULT_REST_RESOURCE_REV_INCLUDES_ENABLED; + + /** + * Constructor + */ + public ServerCapabilityStatementProvider(RestfulServer theServer) { + myServer = theServer; + myContext = theServer.getFhirContext(); + mySearchParamRegistry = null; + myServerConfiguration = null; + myValidationSupport = null; + } + + /** + * Constructor + */ + public ServerCapabilityStatementProvider(FhirContext theContext, RestfulServerConfiguration theServerConfiguration) { + myContext = theContext; + myServerConfiguration = theServerConfiguration; + mySearchParamRegistry = null; + 
myServer = null; + myValidationSupport = null; + } + + /** + * Constructor + */ + public ServerCapabilityStatementProvider(RestfulServer theRestfulServer, ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { + myContext = theRestfulServer.getFhirContext(); + mySearchParamRegistry = theSearchParamRegistry; + myServer = theRestfulServer; + myServerConfiguration = null; + myValidationSupport = theValidationSupport; + } + + private void checkBindingForSystemOps(FhirTerser theTerser, IBase theRest, Set theSystemOps, BaseMethodBinding theMethodBinding) { + RestOperationTypeEnum restOperationType = theMethodBinding.getRestOperationType(); + if (restOperationType.isSystemLevel()) { + String sysOp = restOperationType.getCode(); + if (theSystemOps.contains(sysOp) == false) { + theSystemOps.add(sysOp); + IBase interaction = theTerser.addElement(theRest, "interaction"); + theTerser.addElement(interaction, "code", sysOp); + } + } + } + + + private String conformanceDate(RestfulServerConfiguration theServerConfiguration) { + IPrimitiveType buildDate = theServerConfiguration.getConformanceDate(); + if (buildDate != null && buildDate.getValue() != null) { + try { + return buildDate.getValueAsString(); + } catch (DataFormatException e) { + // fall through + } + } + return InstantDt.withCurrentTime().getValueAsString(); + } + + private RestfulServerConfiguration getServerConfiguration() { + if (myServer != null) { + return myServer.createConfiguration(); + } + return myServerConfiguration; + } + + + /** + * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The + * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. 
+ */ + public String getPublisher() { + return myPublisher; + } + + /** + * Sets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The + * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. + */ + public void setPublisher(String thePublisher) { + myPublisher = thePublisher; + } + + @Override + @Metadata + public IBaseConformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { + + HttpServletRequest servletRequest = null; + if (theRequestDetails instanceof ServletRequestDetails) { + servletRequest = ((ServletRequestDetails) theRequestDetails).getServletRequest(); + } + + RestfulServerConfiguration configuration = getServerConfiguration(); + Bindings bindings = configuration.provideBindings(); + + IBaseConformance retVal = (IBaseConformance) myContext.getResourceDefinition("CapabilityStatement").newInstance(); + + FhirTerser terser = myContext.newTerser(); + + TreeMultimap resourceTypeToSupportedProfiles = getSupportedProfileMultimap(terser); + + terser.addElement(retVal, "id", UUID.randomUUID().toString()); + terser.addElement(retVal, "name", "RestServer"); + terser.addElement(retVal, "publisher", myPublisher); + terser.addElement(retVal, "date", conformanceDate(configuration)); + terser.addElement(retVal, "fhirVersion", myContext.getVersion().getVersion().getFhirVersionString()); + + ServletContext servletContext = (ServletContext) (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + String serverBase = configuration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); + terser.addElement(retVal, "implementation.url", serverBase); + terser.addElement(retVal, "implementation.description", configuration.getImplementationDescription()); + terser.addElement(retVal, "kind", "instance"); + terser.addElement(retVal, "software.name", configuration.getServerName()); + terser.addElement(retVal, "software.version", configuration.getServerVersion()); + if (myContext.isFormatXmlSupported()) { + terser.addElement(retVal, "format", Constants.CT_FHIR_XML_NEW); + terser.addElement(retVal, "format", Constants.FORMAT_XML); + } + if (myContext.isFormatJsonSupported()) { + terser.addElement(retVal, "format", Constants.CT_FHIR_JSON_NEW); + terser.addElement(retVal, "format", Constants.FORMAT_JSON); + } + if (myContext.isFormatRdfSupported()) { + terser.addElement(retVal, "format", Constants.CT_RDF_TURTLE); + terser.addElement(retVal, "format", Constants.FORMAT_TURTLE); + } + terser.addElement(retVal, "status", "active"); + + IBase rest = terser.addElement(retVal, "rest"); + terser.addElement(rest, "mode", "server"); + + Set systemOps = new HashSet<>(); + Set operationNames = new HashSet<>(); + + Map>> resourceToMethods = configuration.collectMethodBindings(); + Map> resourceNameToSharedSupertype = configuration.getNameToSharedSupertype(); + + TreeMultimap resourceNameToIncludes = TreeMultimap.create(); + TreeMultimap resourceNameToRevIncludes = TreeMultimap.create(); + for (Entry>> nextEntry : resourceToMethods.entrySet()) { + String resourceName = nextEntry.getKey(); + for (BaseMethodBinding nextMethod : nextEntry.getValue()) { + if (nextMethod instanceof SearchMethodBinding) { + resourceNameToIncludes.putAll(resourceName, nextMethod.getIncludes()); + resourceNameToRevIncludes.putAll(resourceName, nextMethod.getRevIncludes()); + } + } + + } + + for (Entry>> nextEntry 
: resourceToMethods.entrySet()) { + + String resourceName = nextEntry.getKey(); + if (nextEntry.getKey().isEmpty() == false) { + Set resourceOps = new HashSet<>(); + IBase resource = terser.addElement(rest, "resource"); + + postProcessRestResource(terser, resource, resourceName); + + RuntimeResourceDefinition def; + FhirContext context = configuration.getFhirContext(); + if (resourceNameToSharedSupertype.containsKey(resourceName)) { + def = context.getResourceDefinition(resourceNameToSharedSupertype.get(resourceName)); + } else { + def = context.getResourceDefinition(resourceName); + } + terser.addElement(resource, "type", def.getName()); + terser.addElement(resource, "profile", def.getResourceProfile(serverBase)); + + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + RestOperationTypeEnum resOpCode = nextMethodBinding.getRestOperationType(); + if (resOpCode.isTypeLevel() || resOpCode.isInstanceLevel()) { + String resOp; + resOp = resOpCode.getCode(); + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + IBase interaction = terser.addElement(resource, "interaction"); + terser.addElement(interaction, "code", resOp); + } + if (RestOperationTypeEnum.VREAD.equals(resOpCode)) { + // vread implies read + resOp = "read"; + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + IBase interaction = terser.addElement(resource, "interaction"); + terser.addElement(interaction, "code", resOp); + } + } + } + + if (nextMethodBinding.isSupportsConditional()) { + switch (resOpCode) { + case CREATE: + terser.setElement(resource, "conditionalCreate", "true"); + break; + case DELETE: + if (nextMethodBinding.isSupportsConditionalMultiple()) { + terser.setElement(resource, "conditionalDelete", "multiple"); + } else { + terser.setElement(resource, "conditionalDelete", "single"); + } + break; + case UPDATE: + terser.setElement(resource, "conditionalUpdate", "true"); + break; + case HISTORY_INSTANCE: + case HISTORY_SYSTEM: + case 
HISTORY_TYPE: + case READ: + case SEARCH_SYSTEM: + case SEARCH_TYPE: + case TRANSACTION: + case VALIDATE: + case VREAD: + case METADATA: + case META_ADD: + case META: + case META_DELETE: + case PATCH: + case BATCH: + case ADD_TAGS: + case DELETE_TAGS: + case GET_TAGS: + case GET_PAGE: + case GRAPHQL_REQUEST: + case EXTENDED_OPERATION_SERVER: + case EXTENDED_OPERATION_TYPE: + case EXTENDED_OPERATION_INSTANCE: + default: + break; + } + } + + checkBindingForSystemOps(terser, rest, systemOps, nextMethodBinding); + + if (nextMethodBinding instanceof SearchMethodBinding) { + SearchMethodBinding methodBinding = (SearchMethodBinding) nextMethodBinding; + if (methodBinding.getQueryName() != null) { + String queryName = bindings.getNamedSearchMethodBindingToName().get(methodBinding); + if (operationNames.add(queryName)) { + IBase operation = terser.addElement(rest, "operation"); + terser.addElement(operation, "name", methodBinding.getQueryName()); + terser.addElement(operation, "definition", (getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + queryName)); + } + } + } else if (nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = bindings.getOperationBindingToName().get(methodBinding); + // Only add each operation (by name) once + if (operationNames.add(opName)) { + IBase operation = terser.addElement(rest, "operation"); + terser.addElement(operation, "name", methodBinding.getName().substring(1)); + terser.addElement(operation, "definition", getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + opName); + } + } + + } + + + ISearchParamRegistry serverConfiguration; + if (myServerConfiguration != null) { + serverConfiguration = myServerConfiguration; + } else { + serverConfiguration = myServer.createConfiguration(); + } + + /* + * If we have an explicit registry (which will be the case in the JPA server) we use it as priority, + * 
but also fill in any gaps using params from the server itself. This makes sure we include + * global params like _lastUpdated + */ + Map searchParams; + ISearchParamRegistry searchParamRegistry; + if (mySearchParamRegistry != null) { + searchParamRegistry = mySearchParamRegistry; + searchParams = new HashMap<>(mySearchParamRegistry.getActiveSearchParams(resourceName)); + for (Entry nextBuiltInSp : serverConfiguration.getActiveSearchParams(resourceName).entrySet()) { + if (nextBuiltInSp.getKey().startsWith("_") && !searchParams.containsKey(nextBuiltInSp.getKey())) { + searchParams.put(nextBuiltInSp.getKey(), nextBuiltInSp.getValue()); + } + } + } else { + searchParamRegistry = serverConfiguration; + searchParams = serverConfiguration.getActiveSearchParams(resourceName); + } + + + for (RuntimeSearchParam next : searchParams.values()) { + IBase searchParam = terser.addElement(resource, "searchParam"); + terser.addElement(searchParam, "name", next.getName()); + terser.addElement(searchParam, "type", next.getParamType().getCode()); + if (isNotBlank(next.getDescription())) { + terser.addElement(searchParam, "documentation", next.getDescription()); + } + + String spUri = next.getUri(); + if (isBlank(spUri) && servletRequest != null) { + String id; + if (next.getId() != null) { + id = next.getId().toUnqualifiedVersionless().getValue(); + } else { + id = resourceName + "-" + next.getName(); + } + spUri = configuration.getServerAddressStrategy().determineServerBase(servletRequest.getServletContext(), servletRequest) + "/" + id; + } + if (isNotBlank(spUri)) { + terser.addElement(searchParam, "definition", spUri); + } + } + + // Add Include to CapabilityStatement.rest.resource + NavigableSet resourceIncludes = resourceNameToIncludes.get(resourceName); + if (resourceIncludes.isEmpty()) { + List includes = searchParams + .values() + .stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .map(t -> resourceName + ":" + t.getName()) + .sorted() + 
.collect(Collectors.toList()); + terser.addElement(resource, "searchInclude", "*"); + for (String nextInclude : includes) { + terser.addElement(resource, "searchInclude", nextInclude); + } + } else { + for (String resourceInclude : resourceIncludes) { + terser.addElement(resource, "searchInclude", resourceInclude); + } + } + + // Add RevInclude to CapabilityStatement.rest.resource + if (myRestResourceRevIncludesEnabled) { + NavigableSet resourceRevIncludes = resourceNameToRevIncludes.get(resourceName); + if (resourceRevIncludes.isEmpty()) { + TreeSet revIncludes = new TreeSet<>(); + for (String nextResourceName : resourceToMethods.keySet()) { + if (isBlank(nextResourceName)) { + continue; + } + + for (RuntimeSearchParam t : searchParamRegistry.getActiveSearchParams(nextResourceName).values()) { + if (t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { + if (isNotBlank(t.getName())) { + boolean appropriateTarget = false; + if (t.getTargets().contains(resourceName) || t.getTargets().isEmpty()) { + appropriateTarget = true; + } + + if (appropriateTarget) { + revIncludes.add(nextResourceName + ":" + t.getName()); + } + } + } + } + } + for (String nextInclude : revIncludes) { + terser.addElement(resource, "searchRevInclude", nextInclude); + } + } else { + for (String resourceInclude : resourceRevIncludes) { + terser.addElement(resource, "searchRevInclude", resourceInclude); + } + } + } + + // Add SupportedProfile to CapabilityStatement.rest.resource + for (String supportedProfile : resourceTypeToSupportedProfiles.get(resourceName)) { + terser.addElement(resource, "supportedProfile", supportedProfile); + } + + } else { + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + checkBindingForSystemOps(terser, rest, systemOps, nextMethodBinding); + if (nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = 
bindings.getOperationBindingToName().get(methodBinding); + if (operationNames.add(opName)) { + ourLog.debug("Found bound operation: {}", opName); + IBase operation = terser.addElement(rest, "operation"); + terser.addElement(operation, "name", methodBinding.getName().substring(1)); + terser.addElement(operation, "definition", getOperationDefinitionPrefix(theRequestDetails) + "OperationDefinition/" + opName); + } + } + } + } + + postProcessRest(terser, rest); + + } + + postProcess(terser, retVal); + + return retVal; + } + + private TreeMultimap getSupportedProfileMultimap(FhirTerser terser) { + TreeMultimap resourceTypeToSupportedProfiles = TreeMultimap.create(); + if (myValidationSupport != null) { + List allStructureDefinitions = myValidationSupport.fetchAllNonBaseStructureDefinitions(); + if (allStructureDefinitions != null) { + for (IBaseResource next : allStructureDefinitions) { + String kind = terser.getSinglePrimitiveValueOrNull(next, "kind"); + String url = terser.getSinglePrimitiveValueOrNull(next, "url"); + String baseDefinition = defaultString(terser.getSinglePrimitiveValueOrNull(next, "baseDefinition")); + if ("resource".equals(kind) && isNotBlank(url)) { + + // Don't include the base resource definitions in the supported profile list - This isn't helpful + if (baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/DomainResource") || baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/Resource")) { + continue; + } + + String resourceType = terser.getSinglePrimitiveValueOrNull(next, "snapshot.element.path"); + if (isBlank(resourceType)) { + resourceType = terser.getSinglePrimitiveValueOrNull(next, "differential.element.path"); + } + + if (isNotBlank(resourceType)) { + resourceTypeToSupportedProfiles.put(resourceType, url); + } + } + } + } + } + return resourceTypeToSupportedProfiles; + } + + /** + * Subclasses may override + */ + protected void postProcess(FhirTerser theTerser, IBaseConformance theCapabilityStatement) { + // nothing + } 
+ + /** + * Subclasses may override + */ + protected void postProcessRest(FhirTerser theTerser, IBase theRest) { + // nothing + } + + /** + * Subclasses may override + */ + protected void postProcessRestResource(FhirTerser theTerser, IBase theResource, String theResourceName) { + // nothing + } + + protected String getOperationDefinitionPrefix(RequestDetails theRequestDetails) { + if (theRequestDetails == null) { + return ""; + } + return theRequestDetails.getServerBaseForRequest() + "/"; + } + + + @Read(typeName = "OperationDefinition") + public IBaseResource readOperationDefinition(@IdParam IIdType theId, RequestDetails theRequestDetails) { + if (theId == null || theId.hasIdPart() == false) { + throw new ResourceNotFoundException(theId); + } + RestfulServerConfiguration configuration = getServerConfiguration(); + Bindings bindings = configuration.provideBindings(); + + List operationBindings = bindings.getOperationNameToBindings().get(theId.getIdPart()); + if (operationBindings != null && !operationBindings.isEmpty()) { + return readOperationDefinitionForOperation(operationBindings); + } + List searchBindings = bindings.getSearchNameToBindings().get(theId.getIdPart()); + if (searchBindings != null && !searchBindings.isEmpty()) { + return readOperationDefinitionForNamedSearch(searchBindings); + } + throw new ResourceNotFoundException(theId); + } + + private IBaseResource readOperationDefinitionForNamedSearch(List bindings) { + IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); + FhirTerser terser = myContext.newTerser(); + + terser.addElement(op, "status", "active"); + terser.addElement(op, "kind", "query"); + terser.addElement(op, "affectsState", "false"); + + terser.addElement(op, "instance", "false"); + + Set inParams = new HashSet<>(); + + String operationCode = null; + for (SearchMethodBinding binding : bindings) { + if (isNotBlank(binding.getDescription())) { + terser.addElement(op, "description", 
binding.getDescription()); + } + if (isBlank(binding.getResourceProviderResourceName())) { + terser.addElement(op, "system", "true"); + terser.addElement(op, "type", "false"); + } else { + terser.addElement(op, "system", "false"); + terser.addElement(op, "type", "true"); + terser.addElement(op, "resource", binding.getResourceProviderResourceName()); + } + + if (operationCode == null) { + operationCode = binding.getQueryName(); + } + + for (IParameter nextParamUntyped : binding.getParameters()) { + if (nextParamUntyped instanceof SearchParameter) { + SearchParameter nextParam = (SearchParameter) nextParamUntyped; + if (!inParams.add(nextParam.getName())) { + continue; + } + + IBase param = terser.addElement(op, "parameter"); + terser.addElement(param, "use", "in"); + terser.addElement(param, "type", "string"); + terser.addElement(param, "searchType", nextParam.getParamType().getCode()); + terser.addElement(param, "min", nextParam.isRequired() ? "1" : "0"); + terser.addElement(param, "max", "1"); + terser.addElement(param, "name", nextParam.getName()); + } + } + + } + + terser.addElement(op, "code", operationCode); + terser.addElement(op, "name", "Search_" + operationCode); + + return op; + } + + private IBaseResource readOperationDefinitionForOperation(List bindings) { + IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); + FhirTerser terser = myContext.newTerser(); + + terser.addElement(op, "status", "active"); + terser.addElement(op, "kind", "operation"); + + boolean systemLevel = false; + boolean typeLevel = false; + boolean instanceLevel = false; + boolean affectsState = false; + String description = null; + String code = null; + String name; + + Set resourceNames = new TreeSet<>(); + Set inParams = new HashSet<>(); + Set outParams = new HashSet<>(); + + for (OperationMethodBinding sharedDescription : bindings) { + if (isNotBlank(sharedDescription.getDescription()) && isBlank(description)) { + description = 
sharedDescription.getDescription(); + } + if (sharedDescription.isCanOperateAtInstanceLevel()) { + instanceLevel = true; + } + if (sharedDescription.isCanOperateAtServerLevel()) { + systemLevel = true; + } + if (sharedDescription.isCanOperateAtTypeLevel()) { + typeLevel = true; + } + if (!sharedDescription.isIdempotent()) { + affectsState |= true; + } + + code = sharedDescription.getName().substring(1); + + if (isNotBlank(sharedDescription.getResourceName())) { + resourceNames.add(sharedDescription.getResourceName()); + } + + for (IParameter nextParamUntyped : sharedDescription.getParameters()) { + if (nextParamUntyped instanceof OperationParameter) { + OperationParameter nextParam = (OperationParameter) nextParamUntyped; + if (!inParams.add(nextParam.getName())) { + continue; + } + IBase param = terser.addElement(op, "parameter"); + terser.addElement(param, "use", "in"); + if (nextParam.getParamType() != null) { + terser.addElement(param, "type", nextParam.getParamType()); + } + if (nextParam.getSearchParamType() != null) { + terser.addElement(param, "searchType", nextParam.getSearchParamType()); + } + terser.addElement(param, "min", Integer.toString(nextParam.getMin())); + terser.addElement(param, "max", (nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()))); + terser.addElement(param, "name", nextParam.getName()); + } + } + + for (ReturnType nextParam : sharedDescription.getReturnParams()) { + if (!outParams.add(nextParam.getName())) { + continue; + } + IBase param = terser.addElement(op, "parameter"); + terser.addElement(param, "use", "out"); + if (nextParam.getType() != null) { + terser.addElement(param, "type", nextParam.getType()); + } + terser.addElement(param, "min", Integer.toString(nextParam.getMin())); + terser.addElement(param, "max", (nextParam.getMax() == -1 ? 
"*" : Integer.toString(nextParam.getMax()))); + terser.addElement(param, "name", nextParam.getName()); + } + } + + name = "Operation_" + code; + + terser.addElements(op, "resource", resourceNames); + terser.addElement(op, "name", name); + terser.addElement(op, "code", code); + terser.addElement(op, "description", description); + terser.addElement(op, "affectsState", Boolean.toString(affectsState)); + terser.addElement(op, "system", Boolean.toString(systemLevel)); + terser.addElement(op, "type", Boolean.toString(typeLevel)); + terser.addElement(op, "instance", Boolean.toString(instanceLevel)); + + return op; + } + + @Override + public void setRestfulServer(RestfulServer theRestfulServer) { + // ignore + } + + public void setRestResourceRevIncludesEnabled(boolean theRestResourceRevIncludesEnabled) { + myRestResourceRevIncludesEnabled = theRestResourceRevIncludesEnabled; + } } diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ServerCapabilityStatementProviderR4Test.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ServerCapabilityStatementProviderR4Test.java index 1cbef706f14..f151466e3aa 100644 --- a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ServerCapabilityStatementProviderR4Test.java +++ b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ServerCapabilityStatementProviderR4Test.java @@ -100,1303 +100,1345 @@ import static org.mockito.Mockito.when; public class ServerCapabilityStatementProviderR4Test { - public static final String PATIENT_SUB = "PatientSub"; - public static final String PATIENT_SUB_SUB = "PatientSubSub"; - public static final String PATIENT_SUB_SUB_2 = "PatientSubSub2"; - public static final String PATIENT_TRIPLE_SUB = "PatientTripleSub"; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerCapabilityStatementProviderR4Test.class); - private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); - private FhirValidator myValidator; - - 
@BeforeEach - public void before() { - myValidator = myCtx.newValidator(); - myValidator.registerValidatorModule(new FhirInstanceValidator(myCtx)); - } - - private HttpServletRequest createHttpServletRequest() { - HttpServletRequest req = mock(HttpServletRequest.class); - when(req.getRequestURI()).thenReturn("/FhirStorm/fhir/Patient/_search"); - when(req.getServletPath()).thenReturn("/fhir"); - when(req.getRequestURL()).thenReturn(new StringBuffer().append("http://fhirstorm.dyndns.org:8080/FhirStorm/fhir/Patient/_search")); - when(req.getContextPath()).thenReturn("/FhirStorm"); - return req; - } - - private ServletConfig createServletConfig() { - ServletConfig sc = mock(ServletConfig.class); - when(sc.getServletContext()).thenReturn(null); - return sc; - } - - private CapabilityStatementRestResourceComponent findRestResource(CapabilityStatement conformance, String wantResource) throws Exception { - CapabilityStatementRestResourceComponent resource = null; - for (CapabilityStatementRestResourceComponent next : conformance.getRest().get(0).getResource()) { - if (next.getType().equals(wantResource)) { - resource = next; - } - } - if (resource == null) { - throw new Exception("Could not find resource: " + wantResource); - } - return resource; - } - - @Test - public void testFormats() throws ServletException { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ConditionalProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement cs = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - List formats = cs - .getFormat() - .stream() - .map(t -> t.getCode()) - .collect(Collectors.toList()); - assertThat(formats.toString(), formats, containsInAnyOrder( - "application/fhir+xml", - "xml", - "application/fhir+json", - "json", - "application/x-turtle", - "ttl" - )); - } - - - 
@Test - public void testConditionalOperations() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ConditionalProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance); - ourLog.info(conf); - - assertEquals(2, conformance.getRest().get(0).getResource().size()); - CapabilityStatementRestResourceComponent res = conformance.getRest().get(0).getResource().get(1); - assertEquals("Patient", res.getType()); - - assertTrue(res.getConditionalCreate()); - assertEquals(ConditionalDeleteStatus.MULTIPLE, res.getConditionalDelete()); - assertTrue(res.getConditionalUpdate()); - } - - private RequestDetails createRequestDetails(RestfulServer theServer) { - ServletRequestDetails retVal = new ServletRequestDetails(null); - retVal.setServer(theServer); - retVal.setFhirServerBase("http://localhost/baseR4"); - return retVal; - } - - @Test - public void testExtendedOperationReturningBundle() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ProviderWithExtendedOperationReturningBundle()); - rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - validate(conformance); - - assertEquals(1, conformance.getRest().get(0).getOperation().size()); - assertEquals("everything", conformance.getRest().get(0).getOperation().get(0).getName()); - - 
OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-everything"), createRequestDetails(rs)); - validate(opDef); - assertEquals("everything", opDef.getCode()); - assertThat(opDef.getSystem(), is(false)); - assertThat(opDef.getType(), is(false)); - assertThat(opDef.getInstance(), is(true)); - } - - @Test - public void testExtendedOperationReturningBundleOperation() throws Exception { + public static final String PATIENT_SUB = "PatientSub"; + public static final String PATIENT_SUB_SUB = "PatientSubSub"; + public static final String PATIENT_SUB_SUB_2 = "PatientSubSub2"; + public static final String PATIENT_TRIPLE_SUB = "PatientTripleSub"; + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerCapabilityStatementProviderR4Test.class); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); + private FhirValidator myValidator; + + @BeforeEach + public void before() { + myValidator = myCtx.newValidator(); + myValidator.registerValidatorModule(new FhirInstanceValidator(myCtx)); + } + + private HttpServletRequest createHttpServletRequest() { + HttpServletRequest req = mock(HttpServletRequest.class); + when(req.getRequestURI()).thenReturn("/FhirStorm/fhir/Patient/_search"); + when(req.getServletPath()).thenReturn("/fhir"); + when(req.getRequestURL()).thenReturn(new StringBuffer().append("http://fhirstorm.dyndns.org:8080/FhirStorm/fhir/Patient/_search")); + when(req.getContextPath()).thenReturn("/FhirStorm"); + return req; + } + + private ServletConfig createServletConfig() { + ServletConfig sc = mock(ServletConfig.class); + when(sc.getServletContext()).thenReturn(null); + return sc; + } + + private CapabilityStatementRestResourceComponent findRestResource(CapabilityStatement conformance, String wantResource) throws Exception { + CapabilityStatementRestResourceComponent resource = null; + for (CapabilityStatementRestResourceComponent next : 
conformance.getRest().get(0).getResource()) { + if (next.getType().equals(wantResource)) { + resource = next; + } + } + if (resource == null) { + throw new Exception("Could not find resource: " + wantResource); + } + return resource; + } + + @Test + public void testFormats() throws ServletException { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ConditionalProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement cs = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + List formats = cs + .getFormat() + .stream() + .map(t -> t.getCode()) + .collect(Collectors.toList()); + assertThat(formats.toString(), formats, containsInAnyOrder( + "application/fhir+xml", + "xml", + "application/fhir+json", + "json", + "application/x-turtle", + "ttl" + )); + } + + + @Test + public void testConditionalOperations() throws Exception { + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ConditionalProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance); + ourLog.info(conf); + + assertEquals(2, conformance.getRest().get(0).getResource().size()); + CapabilityStatementRestResourceComponent res = conformance.getRest().get(0).getResource().get(1); + assertEquals("Patient", res.getType()); + + assertTrue(res.getConditionalCreate()); + assertEquals(ConditionalDeleteStatus.MULTIPLE, res.getConditionalDelete()); + assertTrue(res.getConditionalUpdate()); + } + + private RequestDetails createRequestDetails(RestfulServer 
theServer) { + ServletRequestDetails retVal = new ServletRequestDetails(null); + retVal.setServer(theServer); + retVal.setFhirServerBase("http://localhost/baseR4"); + return retVal; + } + + @Test + public void testExtendedOperationReturningBundle() throws Exception { + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ProviderWithExtendedOperationReturningBundle()); + rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + validate(conformance); + + assertEquals(1, conformance.getRest().get(0).getOperation().size()); + assertEquals("everything", conformance.getRest().get(0).getOperation().get(0).getName()); + + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-everything"), createRequestDetails(rs)); + validate(opDef); + assertEquals("everything", opDef.getCode()); + assertThat(opDef.getSystem(), is(false)); + assertThat(opDef.getType(), is(false)); + assertThat(opDef.getInstance(), is(true)); + } + + @Test + public void testExtendedOperationReturningBundleOperation() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ProviderWithExtendedOperationReturningBundle()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ProviderWithExtendedOperationReturningBundle()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { - }; - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { + }; + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - 
OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-everything"), createRequestDetails(rs)); - validate(opDef); + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-everything"), createRequestDetails(rs)); + validate(opDef); - String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef); - ourLog.info(conf); + String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef); + ourLog.info(conf); - assertEquals("everything", opDef.getCode()); - assertEquals(false, opDef.getAffectsState()); - } + assertEquals("everything", opDef.getCode()); + assertEquals(false, opDef.getAffectsState()); + } - @Test - public void testInstanceHistorySupported() throws Exception { + @Test + public void testInstanceHistorySupported() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new InstanceHistoryProvider()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new InstanceHistoryProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = validate(conformance); - conf = myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(conformance); - assertThat(conf, containsString("")); - } + conf = 
myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(conformance); + assertThat(conf, containsString("")); + } - @Test - public void testMultiOptionalDocumentation() throws Exception { + @Test + public void testMultiOptionalDocumentation() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new MultiOptionalProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - boolean found = false; - Collection resourceBindings = rs.getResourceBindings(); - for (ResourceBinding resourceBinding : resourceBindings) { - if (resourceBinding.getResourceName().equals("Patient")) { - List> methodBindings = resourceBinding.getMethodBindings(); - SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); - SearchParameter param = (SearchParameter) binding.getParameters().iterator().next(); - assertEquals("The patient's identifier", param.getDescription()); - found = true; - } - } - - assertTrue(found); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); - - assertThat(conf, containsString("")); - assertThat(conf, containsString("")); - assertThat(conf, containsString("")); - } - - @Test - public void testNonConditionalOperations() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new NonConditionalProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - validate(conformance); - - CapabilityStatementRestResourceComponent res = conformance.getRest().get(0).getResource().get(1); - 
assertEquals("Patient", res.getType()); - - assertNull(res.getConditionalCreateElement().getValue()); - assertNull(res.getConditionalDeleteElement().getValue()); - assertNull(res.getConditionalUpdateElement().getValue()); - } - - /** - * See #379 - */ - @Test - public void testOperationAcrossMultipleTypes() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new MultiTypePatientProvider(), new MultiTypeEncounterProvider()); - rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - - validate(conformance); - - assertEquals(4, conformance.getRest().get(0).getOperation().size()); - List operationNames = toOperationNames(conformance.getRest().get(0).getOperation()); - assertThat(operationNames, containsInAnyOrder("someOp", "validate", "someOp", "validate")); - - List operationIdParts = toOperationIdParts(conformance.getRest().get(0).getOperation()); - assertThat(operationIdParts, containsInAnyOrder("Patient-i-someOp", "Encounter-i-someOp", "Patient-i-validate", "Encounter-i-validate")); - - { - OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-someOp"), createRequestDetails(rs)); - validate(opDef); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); - Set types = toStrings(opDef.getResource()); - assertEquals("someOp", opDef.getCode()); - assertEquals(true, opDef.getInstance()); - assertEquals(false, opDef.getSystem()); - assertThat(types, containsInAnyOrder("Patient")); - assertEquals(2, opDef.getParameter().size()); - assertEquals("someOpParam1", opDef.getParameter().get(0).getName()); - 
assertEquals("date", opDef.getParameter().get(0).getType()); - assertEquals("someOpParam2", opDef.getParameter().get(1).getName()); - assertEquals("Patient", opDef.getParameter().get(1).getType()); - } - { - OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Encounter-i-someOp"), createRequestDetails(rs)); - validate(opDef); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); - Set types = toStrings(opDef.getResource()); - assertEquals("someOp", opDef.getCode()); - assertEquals(true, opDef.getInstance()); - assertEquals(false, opDef.getSystem()); - assertThat(types, containsInAnyOrder("Encounter")); - assertEquals(2, opDef.getParameter().size()); - assertEquals("someOpParam1", opDef.getParameter().get(0).getName()); - assertEquals("date", opDef.getParameter().get(0).getType()); - assertEquals("someOpParam2", opDef.getParameter().get(1).getName()); - assertEquals("Encounter", opDef.getParameter().get(1).getType()); - } - { - OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-validate"), createRequestDetails(rs)); - validate(opDef); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); - Set types = toStrings(opDef.getResource()); - assertEquals("validate", opDef.getCode()); - assertEquals(true, opDef.getInstance()); - assertEquals(false, opDef.getSystem()); - assertThat(types, containsInAnyOrder("Patient")); - assertEquals(1, opDef.getParameter().size()); - assertEquals("resource", opDef.getParameter().get(0).getName()); - assertEquals("Patient", opDef.getParameter().get(0).getType()); - } - } - - @Test - public void testOperationDocumentation() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new SearchProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - 
rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - - String conf = validate(conformance); - - assertThat(conf, containsString("")); - assertThat(conf, containsString("")); - - } - - @Test - public void testOperationOnNoTypes() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new PlainProviderWithExtendedOperationOnNoType()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { - @Override - public CapabilityStatement getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { - return (CapabilityStatement) super.getServerConformance(theRequest, createRequestDetails(rs)); - } - }; - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/-is-plain"), createRequestDetails(rs)); - validate(opDef); - - assertEquals("plain", opDef.getCode()); - assertEquals(false, opDef.getAffectsState()); - assertEquals(3, opDef.getParameter().size()); - - assertTrue(opDef.getParameter().get(0).hasName()); - assertEquals("start", opDef.getParameter().get(0).getName()); - assertEquals("in", opDef.getParameter().get(0).getUse().toCode()); - assertEquals("0", opDef.getParameter().get(0).getMinElement().getValueAsString()); - assertEquals("date", opDef.getParameter().get(0).getTypeElement().getValueAsString()); - - assertEquals("out1", opDef.getParameter().get(2).getName()); - assertEquals("out", opDef.getParameter().get(2).getUse().toCode()); - assertEquals("1", opDef.getParameter().get(2).getMinElement().getValueAsString()); - assertEquals("2", opDef.getParameter().get(2).getMaxElement().getValueAsString()); - assertEquals("string", opDef.getParameter().get(2).getTypeElement().getValueAsString()); - - 
assertThat(opDef.getSystem(), is(true)); - assertThat(opDef.getType(), is(false)); - assertThat(opDef.getInstance(), is(true)); - } - - @Test - public void testProviderWithRequiredAndOptional() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ProviderWithRequiredAndOptional()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - validate(conformance); - - CapabilityStatementRestComponent rest = conformance.getRest().get(0); - CapabilityStatementRestResourceComponent res = rest.getResource().get(0); - assertEquals("DiagnosticReport", res.getType()); - - assertEquals("subject.identifier", res.getSearchParam().get(0).getName()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new MultiOptionalProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + boolean found = false; + Collection resourceBindings = rs.getResourceBindings(); + for (ResourceBinding resourceBinding : resourceBindings) { + if (resourceBinding.getResourceName().equals("Patient")) { + List> methodBindings = resourceBinding.getMethodBindings(); + SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); + SearchParameter param = (SearchParameter) binding.getParameters().iterator().next(); + assertEquals("The patient's identifier", param.getDescription()); + found = true; + } + } + + assertTrue(found); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = validate(conformance); + + assertThat(conf, containsString("")); + assertThat(conf, containsString("")); + 
assertThat(conf, containsString("")); + } + + @Test + public void testNonConditionalOperations() throws Exception { + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new NonConditionalProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + validate(conformance); + + CapabilityStatementRestResourceComponent res = conformance.getRest().get(0).getResource().get(1); + assertEquals("Patient", res.getType()); + + assertNull(res.getConditionalCreateElement().getValue()); + assertNull(res.getConditionalDeleteElement().getValue()); + assertNull(res.getConditionalUpdateElement().getValue()); + } + + /** + * See #379 + */ + @Test + public void testOperationAcrossMultipleTypes() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new MultiTypePatientProvider(), new MultiTypeEncounterProvider()); + rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + + validate(conformance); + + assertEquals(4, conformance.getRest().get(0).getOperation().size()); + List operationNames = toOperationNames(conformance.getRest().get(0).getOperation()); + assertThat(operationNames, containsInAnyOrder("someOp", "validate", "someOp", "validate")); + + List operationIdParts = toOperationIdParts(conformance.getRest().get(0).getOperation()); + assertThat(operationIdParts, containsInAnyOrder("Patient-i-someOp", "Encounter-i-someOp", "Patient-i-validate", 
"Encounter-i-validate")); + + { + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-someOp"), createRequestDetails(rs)); + validate(opDef); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); + Set types = toStrings(opDef.getResource()); + assertEquals("someOp", opDef.getCode()); + assertEquals(true, opDef.getInstance()); + assertEquals(false, opDef.getSystem()); + assertThat(types, containsInAnyOrder("Patient")); + assertEquals(2, opDef.getParameter().size()); + assertEquals("someOpParam1", opDef.getParameter().get(0).getName()); + assertEquals("date", opDef.getParameter().get(0).getType()); + assertEquals("someOpParam2", opDef.getParameter().get(1).getName()); + assertEquals("Patient", opDef.getParameter().get(1).getType()); + } + { + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Encounter-i-someOp"), createRequestDetails(rs)); + validate(opDef); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); + Set types = toStrings(opDef.getResource()); + assertEquals("someOp", opDef.getCode()); + assertEquals(true, opDef.getInstance()); + assertEquals(false, opDef.getSystem()); + assertThat(types, containsInAnyOrder("Encounter")); + assertEquals(2, opDef.getParameter().size()); + assertEquals("someOpParam1", opDef.getParameter().get(0).getName()); + assertEquals("date", opDef.getParameter().get(0).getType()); + assertEquals("someOpParam2", opDef.getParameter().get(1).getName()); + assertEquals("Encounter", opDef.getParameter().get(1).getType()); + } + { + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-validate"), createRequestDetails(rs)); + validate(opDef); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(opDef)); + Set types = toStrings(opDef.getResource()); + 
assertEquals("validate", opDef.getCode()); + assertEquals(true, opDef.getInstance()); + assertEquals(false, opDef.getSystem()); + assertThat(types, containsInAnyOrder("Patient")); + assertEquals(1, opDef.getParameter().size()); + assertEquals("resource", opDef.getParameter().get(0).getName()); + assertEquals("Patient", opDef.getParameter().get(0).getType()); + } + } + + @Test + public void testOperationDocumentation() throws Exception { + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new SearchProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + + String conf = validate(conformance); + + assertThat(conf, containsString("")); + assertThat(conf, containsString("")); + + } + + @Test + public void testOperationOnNoTypes() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new PlainProviderWithExtendedOperationOnNoType()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { + @Override + public CapabilityStatement getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { + return (CapabilityStatement) super.getServerConformance(theRequest, createRequestDetails(rs)); + } + }; + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/-is-plain"), createRequestDetails(rs)); + validate(opDef); + + assertEquals("plain", opDef.getCode()); + assertEquals(false, opDef.getAffectsState()); + assertEquals(3, opDef.getParameter().size()); + + assertTrue(opDef.getParameter().get(0).hasName()); + assertEquals("start", opDef.getParameter().get(0).getName()); + assertEquals("in", 
opDef.getParameter().get(0).getUse().toCode()); + assertEquals("0", opDef.getParameter().get(0).getMinElement().getValueAsString()); + assertEquals("date", opDef.getParameter().get(0).getTypeElement().getValueAsString()); + + assertEquals("out1", opDef.getParameter().get(2).getName()); + assertEquals("out", opDef.getParameter().get(2).getUse().toCode()); + assertEquals("1", opDef.getParameter().get(2).getMinElement().getValueAsString()); + assertEquals("2", opDef.getParameter().get(2).getMaxElement().getValueAsString()); + assertEquals("string", opDef.getParameter().get(2).getTypeElement().getValueAsString()); + + assertThat(opDef.getSystem(), is(true)); + assertThat(opDef.getType(), is(false)); + assertThat(opDef.getInstance(), is(true)); + } + + @Test + public void testProviderWithRequiredAndOptional() throws Exception { + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ProviderWithRequiredAndOptional()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + validate(conformance); + + CapabilityStatementRestComponent rest = conformance.getRest().get(0); + CapabilityStatementRestResourceComponent res = rest.getResource().get(0); + assertEquals("DiagnosticReport", res.getType()); + + assertEquals("subject.identifier", res.getSearchParam().get(0).getName()); // assertEquals("identifier", res.getSearchParam().get(0).getChain().get(0).getValue()); - assertEquals(DiagnosticReport.SP_CODE, res.getSearchParam().get(1).getName()); + assertEquals(DiagnosticReport.SP_CODE, res.getSearchParam().get(1).getName()); - assertEquals(DiagnosticReport.SP_DATE, res.getSearchParam().get(2).getName()); + assertEquals(DiagnosticReport.SP_DATE, res.getSearchParam().get(2).getName()); - assertEquals(1, 
res.getSearchInclude().size()); - assertEquals("DiagnosticReport.result", res.getSearchInclude().get(0).getValue()); - } + assertEquals(1, res.getSearchInclude().size()); + assertEquals("DiagnosticReport.result", res.getSearchInclude().get(0).getValue()); + } - @Test - public void testReadAndVReadSupported() throws Exception { + @Test + public void testReadAndVReadSupported() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new VreadProvider()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new VreadProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = validate(conformance); - assertThat(conf, containsString("")); - assertThat(conf, containsString("")); - } + assertThat(conf, containsString("")); + assertThat(conf, containsString("")); + } - @Test - public void testReadSupported() throws Exception { + @Test + public void testReadSupported() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new ReadProvider()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new ReadProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + 
rs.init(createServletConfig()); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance); - ourLog.info(conf); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance); + ourLog.info(conf); - conf = myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(conformance); - assertThat(conf, not(containsString(""))); - assertThat(conf, containsString("")); - } + conf = myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(conformance); + assertThat(conf, not(containsString(""))); + assertThat(conf, containsString("")); + } - @Test - public void testSearchParameterDocumentation() throws Exception { + @Test + public void testSearchParameterDocumentation() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new SearchProvider()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new SearchProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - boolean found = false; - Collection resourceBindings = rs.getResourceBindings(); - for (ResourceBinding resourceBinding : resourceBindings) { - if (resourceBinding.getResourceName().equals("Patient")) { - List> methodBindings = resourceBinding.getMethodBindings(); - SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); - for (IParameter next : binding.getParameters()) { - SearchParameter param = (SearchParameter) next; - if 
(param.getDescription().contains("The patient's identifier (MRN or other card number")) { - found = true; - } - } - found = true; - } - } - assertTrue(found); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + boolean found = false; + Collection resourceBindings = rs.getResourceBindings(); + for (ResourceBinding resourceBinding : resourceBindings) { + if (resourceBinding.getResourceName().equals("Patient")) { + List> methodBindings = resourceBinding.getMethodBindings(); + SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); + for (IParameter next : binding.getParameters()) { + SearchParameter param = (SearchParameter) next; + if (param.getDescription().contains("The patient's identifier (MRN or other card number")) { + found = true; + } + } + found = true; + } + } + assertTrue(found); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); + String conf = validate(conformance); - assertThat(conf, containsString("")); - assertThat(conf, containsString("")); + assertThat(conf, containsString("")); + assertThat(conf, containsString("")); - } + } - @Test - public void testFormatIncludesSpecialNonMediaTypeFormats() throws ServletException { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new SearchProvider()); + @Test + public void testFormatIncludesSpecialNonMediaTypeFormats() throws ServletException { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new SearchProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); - CapabilityStatement serverConformance = (CapabilityStatement) 
sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + rs.init(createServletConfig()); + CapabilityStatement serverConformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - List formatCodes = serverConformance.getFormat().stream().map(c -> c.getCode()).collect(Collectors.toList()); + List formatCodes = serverConformance.getFormat().stream().map(c -> c.getCode()).collect(Collectors.toList()); - assertThat(formatCodes, hasItem(Constants.FORMAT_XML)); - assertThat(formatCodes, hasItem(Constants.FORMAT_JSON)); - assertThat(formatCodes, hasItem(Constants.CT_FHIR_JSON_NEW)); - assertThat(formatCodes, hasItem(Constants.CT_FHIR_XML_NEW)); - } + assertThat(formatCodes, hasItem(Constants.FORMAT_XML)); + assertThat(formatCodes, hasItem(Constants.FORMAT_JSON)); + assertThat(formatCodes, hasItem(Constants.CT_FHIR_JSON_NEW)); + assertThat(formatCodes, hasItem(Constants.CT_FHIR_XML_NEW)); + } - /** - * See #286 - */ - @Test - public void testSearchReferenceParameterDocumentation() throws Exception { + /** + * See #286 + */ + @Test + public void testSearchReferenceParameterDocumentation() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new PatientResourceProvider()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new PatientResourceProvider()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - boolean found = false; - Collection resourceBindings = rs.getResourceBindings(); - for (ResourceBinding resourceBinding : resourceBindings) { - if (resourceBinding.getResourceName().equals("Patient")) { - List> methodBindings = resourceBinding.getMethodBindings(); - SearchMethodBinding binding 
= (SearchMethodBinding) methodBindings.get(0); - SearchParameter param = (SearchParameter) binding.getParameters().get(25); - assertEquals("The organization at which this person is a patient", param.getDescription()); - found = true; - } - } - assertTrue(found); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + boolean found = false; + Collection resourceBindings = rs.getResourceBindings(); + for (ResourceBinding resourceBinding : resourceBindings) { + if (resourceBinding.getResourceName().equals("Patient")) { + List> methodBindings = resourceBinding.getMethodBindings(); + SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); + SearchParameter param = (SearchParameter) binding.getParameters().get(25); + assertEquals("The organization at which this person is a patient", param.getDescription()); + found = true; + } + } + assertTrue(found); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); + String conf = validate(conformance); - } + } - /** - * See #286 - */ - @Test - public void testSearchReferenceParameterWithWhitelistDocumentation() throws Exception { + /** + * See #286 + */ + @Test + public void testSearchReferenceParameterWithWhitelistDocumentation() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new SearchProviderWithWhitelist()); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new SearchProviderWithWhitelist()); - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - rs.init(createServletConfig()); + rs.init(createServletConfig()); - boolean found = false; - Collection resourceBindings = 
rs.getResourceBindings(); - for (ResourceBinding resourceBinding : resourceBindings) { - if (resourceBinding.getResourceName().equals("Patient")) { - List> methodBindings = resourceBinding.getMethodBindings(); - SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); - SearchParameter param = (SearchParameter) binding.getParameters().get(0); - assertEquals("The organization at which this person is a patient", param.getDescription()); - found = true; - } - } - assertTrue(found); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + boolean found = false; + Collection resourceBindings = rs.getResourceBindings(); + for (ResourceBinding resourceBinding : resourceBindings) { + if (resourceBinding.getResourceName().equals("Patient")) { + List> methodBindings = resourceBinding.getMethodBindings(); + SearchMethodBinding binding = (SearchMethodBinding) methodBindings.get(0); + SearchParameter param = (SearchParameter) binding.getParameters().get(0); + assertEquals("The organization at which this person is a patient", param.getDescription()); + found = true; + } + } + assertTrue(found); + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); + String conf = validate(conformance); - CapabilityStatementRestResourceComponent resource = findRestResource(conformance, "Patient"); + CapabilityStatementRestResourceComponent resource = findRestResource(conformance, "Patient"); - CapabilityStatementRestResourceSearchParamComponent param = resource.getSearchParam().get(0); + CapabilityStatementRestResourceSearchParamComponent param = resource.getSearchParam().get(0); // assertEquals("bar", param.getChain().get(0).getValue()); // assertEquals("foo", param.getChain().get(1).getValue()); // assertEquals(2, param.getChain().size()); - } + } - @Test - public void 
testSearchReferenceParameterWithList() throws Exception { + @Test + public void testSearchReferenceParameterWithList() throws Exception { - RestfulServer rsNoType = new RestfulServer(myCtx) { - @Override - public RestfulServerConfiguration createConfiguration() { - RestfulServerConfiguration retVal = super.createConfiguration(); - retVal.setConformanceDate(new InstantDt("2011-02-22T11:22:33Z")); - return retVal; - } - }; - rsNoType.registerProvider(new SearchProviderWithListNoType()); - ServerCapabilityStatementProvider scNoType = new ServerCapabilityStatementProvider(rsNoType); - rsNoType.setServerConformanceProvider(scNoType); - rsNoType.init(createServletConfig()); + RestfulServer rsNoType = new RestfulServer(myCtx) { + @Override + public RestfulServerConfiguration createConfiguration() { + RestfulServerConfiguration retVal = super.createConfiguration(); + retVal.setConformanceDate(new InstantDt("2011-02-22T11:22:33Z")); + return retVal; + } + }; + rsNoType.registerProvider(new SearchProviderWithListNoType()); + ServerCapabilityStatementProvider scNoType = new ServerCapabilityStatementProvider(rsNoType); + rsNoType.setServerConformanceProvider(scNoType); + rsNoType.init(createServletConfig()); - CapabilityStatement conformance = (CapabilityStatement) scNoType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsNoType)); - conformance.setId(""); - String confNoType = validate(conformance); + CapabilityStatement conformance = (CapabilityStatement) scNoType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsNoType)); + conformance.setId(""); + String confNoType = validate(conformance); - RestfulServer rsWithType = new RestfulServer(myCtx) { - @Override - public RestfulServerConfiguration createConfiguration() { - RestfulServerConfiguration retVal = super.createConfiguration(); - retVal.setConformanceDate(new InstantDt("2011-02-22T11:22:33Z")); - return retVal; - } - }; - rsWithType.registerProvider(new 
SearchProviderWithListWithType()); - ServerCapabilityStatementProvider scWithType = new ServerCapabilityStatementProvider(rsWithType); - rsWithType.setServerConformanceProvider(scWithType); - rsWithType.init(createServletConfig()); + RestfulServer rsWithType = new RestfulServer(myCtx) { + @Override + public RestfulServerConfiguration createConfiguration() { + RestfulServerConfiguration retVal = super.createConfiguration(); + retVal.setConformanceDate(new InstantDt("2011-02-22T11:22:33Z")); + return retVal; + } + }; + rsWithType.registerProvider(new SearchProviderWithListWithType()); + ServerCapabilityStatementProvider scWithType = new ServerCapabilityStatementProvider(rsWithType); + rsWithType.setServerConformanceProvider(scWithType); + rsWithType.init(createServletConfig()); - CapabilityStatement conformanceWithType = (CapabilityStatement) scWithType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsWithType)); - conformanceWithType.setId(""); - String confWithType = validate(conformanceWithType); + CapabilityStatement conformanceWithType = (CapabilityStatement) scWithType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsWithType)); + conformanceWithType.setId(""); + String confWithType = validate(conformanceWithType); - assertEquals(confNoType, confWithType); - assertThat(confNoType, containsString("")); - } + assertEquals(confNoType, confWithType); + assertThat(confNoType, containsString("")); + } - @Test - public void testSystemHistorySupported() throws Exception { + @Test + public void testSystemHistorySupported() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new SystemHistoryProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), 
createRequestDetails(rs)); - String conf = validate(conformance); - - assertThat(conf, containsString("")); - } - - @Test - public void testTypeHistorySupported() throws Exception { - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new TypeHistoryProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - String conf = validate(conformance); - - assertThat(conf, containsString("")); - } - - @Test - public void testStaticIncludeChains() throws Exception { - - class MyProvider implements IResourceProvider { - - @Override - public Class getResourceType() { - return DiagnosticReport.class; - } - - @Search - public List search(@RequiredParam(name = DiagnosticReport.SP_PATIENT + "." + Patient.SP_FAMILY) StringParam lastName, - @RequiredParam(name = DiagnosticReport.SP_PATIENT + "." + Patient.SP_GIVEN) StringParam firstName, - @RequiredParam(name = DiagnosticReport.SP_PATIENT + "." 
+ Patient.SP_BIRTHDATE) DateParam dob, - @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam range) { - return null; - } - - } - - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new MyProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { - }; - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - - validate(opDef); - - CapabilityStatementRestResourceComponent resource = opDef.getRest().get(0).getResource().get(0); - assertEquals("DiagnosticReport", resource.getType()); - List searchParamNames = resource.getSearchParam().stream().map(t -> t.getName()).collect(Collectors.toList()); - assertThat(searchParamNames, containsInAnyOrder("patient.birthdate", "patient.family", "patient.given", "date")); - } - - @Test - public void testSystemLevelNamedQueryWithParameters() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new NamedQueryPlainProvider()); - rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - validate(conformance); - - CapabilityStatementRestComponent restComponent = conformance.getRest().get(0); - CapabilityStatementRestResourceOperationComponent operationComponent = restComponent.getOperation().get(0); - assertThat(operationComponent.getName(), is(NamedQueryPlainProvider.QUERY_NAME)); - - String operationReference = operationComponent.getDefinition(); - assertThat(operationReference, not(nullValue())); - - OperationDefinition operationDefinition = 
(OperationDefinition) sc.readOperationDefinition(new IdType(operationReference), createRequestDetails(rs)); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationDefinition)); - validate(operationDefinition); - assertThat(operationDefinition.getCode(), is(NamedQueryPlainProvider.QUERY_NAME)); - assertThat(operationDefinition.getName(), is("Search_" + NamedQueryPlainProvider.QUERY_NAME)); - assertThat(operationDefinition.getStatus(), is(PublicationStatus.ACTIVE)); - assertThat(operationDefinition.getKind(), is(OperationKind.QUERY)); - assertThat(operationDefinition.getDescription(), is(NamedQueryPlainProvider.DESCRIPTION)); - assertThat(operationDefinition.getAffectsState(), is(false)); - assertThat("A system level search has no target resources", operationDefinition.getResource(), is(empty())); - assertThat(operationDefinition.getSystem(), is(true)); - assertThat(operationDefinition.getType(), is(false)); - assertThat(operationDefinition.getInstance(), is(false)); - List parameters = operationDefinition.getParameter(); - assertThat(parameters.size(), is(1)); - OperationDefinitionParameterComponent param = parameters.get(0); - assertThat(param.getName(), is(NamedQueryPlainProvider.SP_QUANTITY)); - assertThat(param.getType(), is("string")); - assertThat(param.getSearchTypeElement().asStringValue(), is(RestSearchParameterTypeEnum.QUANTITY.getCode())); - assertThat(param.getMin(), is(1)); - assertThat(param.getMax(), is("1")); - assertThat(param.getUse(), is(OperationParameterUse.IN)); - } - - @Test - public void testResourceLevelNamedQueryWithParameters() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new NamedQueryResourceProvider()); - rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - 
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - validate(conformance); - - CapabilityStatementRestComponent restComponent = conformance.getRest().get(0); - CapabilityStatementRestResourceOperationComponent operationComponent = restComponent.getOperation().get(0); - String operationReference = operationComponent.getDefinition(); - assertThat(operationReference, not(nullValue())); - - OperationDefinition operationDefinition = (OperationDefinition) sc.readOperationDefinition(new IdType(operationReference), createRequestDetails(rs)); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationDefinition)); - validate(operationDefinition); - assertThat("The operation name should be the code if no description is set", operationDefinition.getName(), is("Search_" + NamedQueryResourceProvider.QUERY_NAME)); - String patientResourceName = "Patient"; - assertThat("A resource level search targets the resource of the provider it's defined in", operationDefinition.getResource().get(0).getValue(), is(patientResourceName)); - assertThat(operationDefinition.getSystem(), is(false)); - assertThat(operationDefinition.getType(), is(true)); - assertThat(operationDefinition.getInstance(), is(false)); - List parameters = operationDefinition.getParameter(); - assertThat(parameters.size(), is(1)); - OperationDefinitionParameterComponent param = parameters.get(0); - assertThat(param.getName(), is(NamedQueryResourceProvider.SP_PARAM)); - assertThat(param.getType(), is("string")); - assertThat(param.getSearchTypeElement().asStringValue(), is(RestSearchParameterTypeEnum.STRING.getCode())); - assertThat(param.getMin(), is(0)); - assertThat(param.getMax(), is("1")); - assertThat(param.getUse(), is(OperationParameterUse.IN)); - - CapabilityStatementRestResourceComponent patientResource = restComponent.getResource().stream() - .filter(r -> patientResourceName.equals(r.getType())) - 
.findAny().get(); - assertThat("Named query parameters should not appear in the resource search params", patientResource.getSearchParam(), is(empty())); - } - - @Test - public void testExtendedOperationAtTypeLevel() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setProviders(new TypeLevelOperationProvider()); - rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - - validate(conformance); - - List operations = conformance.getRest().get(0).getOperation(); - assertThat(operations.size(), is(1)); - assertThat(operations.get(0).getName(), is(TypeLevelOperationProvider.OPERATION_NAME)); - - OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType(operations.get(0).getDefinition()), createRequestDetails(rs)); - validate(opDef); - assertEquals(TypeLevelOperationProvider.OPERATION_NAME, opDef.getCode()); - assertThat(opDef.getSystem(), is(false)); - assertThat(opDef.getType(), is(true)); - assertThat(opDef.getInstance(), is(false)); - } - - @Test - public void testProfiledResourceStructureDefinitionLinks() throws Exception { - RestfulServer rs = new RestfulServer(myCtx); - rs.setResourceProviders(new ProfiledPatientProvider(), new MultipleProfilesPatientProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); - - List resources = 
conformance.getRestFirstRep().getResource(); - CapabilityStatementRestResourceComponent patientResource = resources.stream() - .filter(resource -> "Patient".equals(resource.getType())) - .findFirst().get(); - assertThat(patientResource.getProfile(), containsString(PATIENT_SUB)); - } - - @Test - public void testRevIncludes_Explicit() throws Exception { - - class PatientResourceProvider implements IResourceProvider { - - @Override - public Class getResourceType() { - return Patient.class; - } - - @Search - public List search(@IncludeParam(reverse = true, allow = {"Observation:foo", "Provenance:bar"}) Set theRevIncludes) { - return Collections.emptyList(); - } - - } - - class ObservationResourceProvider implements IResourceProvider { - - @Override - public Class getResourceType() { - return Observation.class; - } - - @Search - public List search(@OptionalParam(name = "subject") ReferenceParam theSubject) { - return Collections.emptyList(); - } - - } - - RestfulServer rs = new RestfulServer(myCtx); - rs.setResourceProviders(new PatientResourceProvider(), new ObservationResourceProvider()); - - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - sc.setRestResourceRevIncludesEnabled(true); - rs.setServerConformanceProvider(sc); - - rs.init(createServletConfig()); - - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); - - List resources = conformance.getRestFirstRep().getResource(); - CapabilityStatementRestResourceComponent patientResource = resources.stream() - .filter(resource -> "Patient".equals(resource.getType())) - .findFirst().get(); - assertThat(toStrings(patientResource.getSearchRevInclude()), containsInAnyOrder("Observation:foo", "Provenance:bar")); - } + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new SystemHistoryProvider()); - @Test 
- public void testRevIncludes_Inferred() throws Exception { + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - class PatientResourceProvider implements IResourceProvider { + rs.init(createServletConfig()); - @Override - public Class getResourceType() { - return Patient.class; - } + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = validate(conformance); - @Search - public List search(@IncludeParam(reverse = true) Set theRevIncludes) { - return Collections.emptyList(); - } - - } - - class ObservationResourceProvider implements IResourceProvider { - - @Override - public Class getResourceType() { - return Observation.class; - } - - @Search - public List search(@OptionalParam(name = "subject") ReferenceParam theSubject) { - return Collections.emptyList(); - } + assertThat(conf, containsString("")); + } - } - - RestfulServer rs = new RestfulServer(myCtx); - rs.setResourceProviders(new PatientResourceProvider(), new ObservationResourceProvider()); + @Test + public void testTypeHistorySupported() throws Exception { - ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); - sc.setRestResourceRevIncludesEnabled(true); - rs.setServerConformanceProvider(sc); + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new TypeHistoryProvider()); - rs.init(createServletConfig()); + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); - CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); - ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); - - List resources = conformance.getRestFirstRep().getResource(); - CapabilityStatementRestResourceComponent patientResource = resources.stream() - 
.filter(resource -> "Patient".equals(resource.getType())) - .findFirst().get(); - assertThat(toStrings(patientResource.getSearchRevInclude()), containsInAnyOrder("Observation:subject")); - } + rs.init(createServletConfig()); - private List toOperationIdParts(List theOperation) { - ArrayList retVal = Lists.newArrayList(); - for (CapabilityStatementRestResourceOperationComponent next : theOperation) { - retVal.add(new IdType(next.getDefinition()).getIdPart()); - } - return retVal; - } - - private List toOperationNames(List theOperation) { - ArrayList retVal = Lists.newArrayList(); - for (CapabilityStatementRestResourceOperationComponent next : theOperation) { - retVal.add(next.getName()); - } - return retVal; - } - - private String validate(IBaseResource theResource) { - String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(theResource); - ourLog.info("Def:\n{}", conf); - - ValidationResult result = myValidator.validateWithResult(conf); - OperationOutcome operationOutcome = (OperationOutcome) result.toOperationOutcome(); - String outcome = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationOutcome); - ourLog.info("Outcome: {}", outcome); - - assertTrue(result.isSuccessful(), outcome); - List warningsAndErrors = operationOutcome - .getIssue() - .stream() - .filter(t -> t.getSeverity().ordinal() <= OperationOutcome.IssueSeverity.WARNING.ordinal()) // <= because this enum has a strange order - .collect(Collectors.toList()); - assertThat(outcome, warningsAndErrors, is(empty())); - - return myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(theResource); - } - - @SuppressWarnings("unused") - public static class ConditionalProvider implements IResourceProvider { - - @Create - public MethodOutcome create(@ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { - return null; - } - - @Delete - public MethodOutcome delete(@IdParam IdType theId, @ConditionalUrlParam(supportsMultiple = 
true) String theConditionalUrl) { - return null; - } - - @Override - public Class getResourceType() { - return Patient.class; - } - - @Update - public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { - return null; - } - - } - - @SuppressWarnings("unused") - public static class InstanceHistoryProvider implements IResourceProvider { - @Override - public Class getResourceType() { - return Patient.class; - } - - @History - public List history(@IdParam IdType theId) { - return null; - } - - } - - @SuppressWarnings("unused") - public static class MultiOptionalProvider { - - @Search(type = Patient.class) - public Patient findPatient(@Description(shortDefinition = "The patient's identifier") @OptionalParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier, - @Description(shortDefinition = "The patient's name") @OptionalParam(name = Patient.SP_NAME) StringParam theName) { - return null; - } - - } - - @SuppressWarnings("unused") - public static class MultiTypeEncounterProvider implements IResourceProvider { - - @Operation(name = "someOp") - public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, - @OperationParam(name = "someOpParam1") DateType theStart, @OperationParam(name = "someOpParam2") Encounter theEnd) { - return null; - } - - @Override - public Class getResourceType() { - return Encounter.class; - } - - @Validate - public IBundleProvider validate(HttpServletRequest theServletRequest, @IdParam IdType theId, @ResourceParam Encounter thePatient) { - return null; - } - - } - - @SuppressWarnings("unused") - public static class MultiTypePatientProvider implements IResourceProvider { - - @Operation(name = "someOp") - public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, - @OperationParam(name = "someOpParam1") DateType theStart, @OperationParam(name = "someOpParam2") Patient theEnd) { - return null; - } - - 
@Override - public Class getResourceType() { - return Patient.class; - } - - @Validate - public IBundleProvider validate(HttpServletRequest theServletRequest, @IdParam IdType theId, @ResourceParam Patient thePatient) { - return null; - } - - } - - @SuppressWarnings("unused") - public static class NonConditionalProvider implements IResourceProvider { - - @Create - public MethodOutcome create(@ResourceParam Patient thePatient) { - return null; - } + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + String conf = validate(conformance); - @Delete - public MethodOutcome delete(@IdParam IdType theId) { - return null; - } - - @Override - public Class getResourceType() { - return Patient.class; - } + assertThat(conf, containsString("")); + } - @Update - public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient) { - return null; - } + @Test + public void testStaticIncludeChains() throws Exception { - } + class MyProvider implements IResourceProvider { - @SuppressWarnings("unused") - public static class PlainProviderWithExtendedOperationOnNoType { + @Override + public Class getResourceType() { + return DiagnosticReport.class; + } - @Operation(name = "plain", idempotent = true, returnParameters = {@OperationParam(min = 1, max = 2, name = "out1", type = StringType.class)}) - public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, @OperationParam(name = "start") DateType theStart, - @OperationParam(name = "end") DateType theEnd) { - return null; - } + @Search + public List search(@RequiredParam(name = DiagnosticReport.SP_PATIENT + "." + Patient.SP_FAMILY) StringParam lastName, + @RequiredParam(name = DiagnosticReport.SP_PATIENT + "." + Patient.SP_GIVEN) StringParam firstName, + @RequiredParam(name = DiagnosticReport.SP_PATIENT + "." 
+ Patient.SP_BIRTHDATE) DateParam dob, + @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam range) { + return null; + } - } + } + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new MyProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { + }; + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + + validate(opDef); + + CapabilityStatementRestResourceComponent resource = opDef.getRest().get(0).getResource().get(0); + assertEquals("DiagnosticReport", resource.getType()); + List searchParamNames = resource.getSearchParam().stream().map(t -> t.getName()).collect(Collectors.toList()); + assertThat(searchParamNames, containsInAnyOrder("patient.birthdate", "patient.family", "patient.given", "date")); + } - @SuppressWarnings("unused") - public static class ProviderWithExtendedOperationReturningBundle implements IResourceProvider { + @Test + public void testIncludeLastUpdatedSearchParam() throws Exception { - @Operation(name = "everything", idempotent = true) - public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, @OperationParam(name = "start") DateType theStart, - @OperationParam(name = "end") DateType theEnd) { - return null; - } + class MyProvider implements IResourceProvider { - @Override - public Class getResourceType() { - return Patient.class; - } + @Override + public Class getResourceType() { + return DiagnosticReport.class; + } - } + @Search + public List search(@OptionalParam(name = DiagnosticReport.SP_DATE) + DateRangeParam range, - @SuppressWarnings("unused") - public static class ProviderWithRequiredAndOptional { + @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") + @OptionalParam(name = "_lastUpdated") + DateRangeParam 
theLastUpdated + ) { + return null; + } + + } + + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new MyProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs) { + }; + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + + validate(opDef); + + CapabilityStatementRestResourceComponent resource = opDef.getRest().get(0).getResource().get(0); + assertEquals("DiagnosticReport", resource.getType()); + List searchParamNames = resource.getSearchParam().stream().map(t -> t.getName()).collect(Collectors.toList()); + assertThat(searchParamNames, containsInAnyOrder("date", "_lastUpdated")); + } + + @Test + public void testSystemLevelNamedQueryWithParameters() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new NamedQueryPlainProvider()); + rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + validate(conformance); + + CapabilityStatementRestComponent restComponent = conformance.getRest().get(0); + CapabilityStatementRestResourceOperationComponent operationComponent = restComponent.getOperation().get(0); + assertThat(operationComponent.getName(), is(NamedQueryPlainProvider.QUERY_NAME)); + + String operationReference = operationComponent.getDefinition(); + assertThat(operationReference, not(nullValue())); + + OperationDefinition operationDefinition = (OperationDefinition) sc.readOperationDefinition(new IdType(operationReference), createRequestDetails(rs)); + 
ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationDefinition)); + validate(operationDefinition); + assertThat(operationDefinition.getCode(), is(NamedQueryPlainProvider.QUERY_NAME)); + assertThat(operationDefinition.getName(), is("Search_" + NamedQueryPlainProvider.QUERY_NAME)); + assertThat(operationDefinition.getStatus(), is(PublicationStatus.ACTIVE)); + assertThat(operationDefinition.getKind(), is(OperationKind.QUERY)); + assertThat(operationDefinition.getDescription(), is(NamedQueryPlainProvider.DESCRIPTION)); + assertThat(operationDefinition.getAffectsState(), is(false)); + assertThat("A system level search has no target resources", operationDefinition.getResource(), is(empty())); + assertThat(operationDefinition.getSystem(), is(true)); + assertThat(operationDefinition.getType(), is(false)); + assertThat(operationDefinition.getInstance(), is(false)); + List parameters = operationDefinition.getParameter(); + assertThat(parameters.size(), is(1)); + OperationDefinitionParameterComponent param = parameters.get(0); + assertThat(param.getName(), is(NamedQueryPlainProvider.SP_QUANTITY)); + assertThat(param.getType(), is("string")); + assertThat(param.getSearchTypeElement().asStringValue(), is(RestSearchParameterTypeEnum.QUANTITY.getCode())); + assertThat(param.getMin(), is(1)); + assertThat(param.getMax(), is("1")); + assertThat(param.getUse(), is(OperationParameterUse.IN)); + } + + @Test + public void testResourceLevelNamedQueryWithParameters() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new NamedQueryResourceProvider()); + rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), 
createRequestDetails(rs)); + validate(conformance); + + CapabilityStatementRestComponent restComponent = conformance.getRest().get(0); + CapabilityStatementRestResourceOperationComponent operationComponent = restComponent.getOperation().get(0); + String operationReference = operationComponent.getDefinition(); + assertThat(operationReference, not(nullValue())); + + OperationDefinition operationDefinition = (OperationDefinition) sc.readOperationDefinition(new IdType(operationReference), createRequestDetails(rs)); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationDefinition)); + validate(operationDefinition); + assertThat("The operation name should be the code if no description is set", operationDefinition.getName(), is("Search_" + NamedQueryResourceProvider.QUERY_NAME)); + String patientResourceName = "Patient"; + assertThat("A resource level search targets the resource of the provider it's defined in", operationDefinition.getResource().get(0).getValue(), is(patientResourceName)); + assertThat(operationDefinition.getSystem(), is(false)); + assertThat(operationDefinition.getType(), is(true)); + assertThat(operationDefinition.getInstance(), is(false)); + List parameters = operationDefinition.getParameter(); + assertThat(parameters.size(), is(1)); + OperationDefinitionParameterComponent param = parameters.get(0); + assertThat(param.getName(), is(NamedQueryResourceProvider.SP_PARAM)); + assertThat(param.getType(), is("string")); + assertThat(param.getSearchTypeElement().asStringValue(), is(RestSearchParameterTypeEnum.STRING.getCode())); + assertThat(param.getMin(), is(0)); + assertThat(param.getMax(), is("1")); + assertThat(param.getUse(), is(OperationParameterUse.IN)); + + CapabilityStatementRestResourceComponent patientResource = restComponent.getResource().stream() + .filter(r -> patientResourceName.equals(r.getType())) + .findAny().get(); + assertThat("Named query parameters should not appear in the resource search params", 
patientResource.getSearchParam(), is(empty())); + } + + @Test + public void testExtendedOperationAtTypeLevel() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setProviders(new TypeLevelOperationProvider()); + rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4")); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + + validate(conformance); + + List operations = conformance.getRest().get(0).getOperation(); + assertThat(operations.size(), is(1)); + assertThat(operations.get(0).getName(), is(TypeLevelOperationProvider.OPERATION_NAME)); + + OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType(operations.get(0).getDefinition()), createRequestDetails(rs)); + validate(opDef); + assertEquals(TypeLevelOperationProvider.OPERATION_NAME, opDef.getCode()); + assertThat(opDef.getSystem(), is(false)); + assertThat(opDef.getType(), is(true)); + assertThat(opDef.getInstance(), is(false)); + } + + @Test + public void testProfiledResourceStructureDefinitionLinks() throws Exception { + RestfulServer rs = new RestfulServer(myCtx); + rs.setResourceProviders(new ProfiledPatientProvider(), new MultipleProfilesPatientProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); + + List resources = conformance.getRestFirstRep().getResource(); + CapabilityStatementRestResourceComponent patientResource = 
resources.stream() + .filter(resource -> "Patient".equals(resource.getType())) + .findFirst().get(); + assertThat(patientResource.getProfile(), containsString(PATIENT_SUB)); + } + + @Test + public void testRevIncludes_Explicit() throws Exception { + + class PatientResourceProvider implements IResourceProvider { + + @Override + public Class getResourceType() { + return Patient.class; + } + + @Search + public List search(@IncludeParam(reverse = true, allow = {"Observation:foo", "Provenance:bar"}) Set theRevIncludes) { + return Collections.emptyList(); + } + + } + + class ObservationResourceProvider implements IResourceProvider { + + @Override + public Class getResourceType() { + return Observation.class; + } + + @Search + public List search(@OptionalParam(name = "subject") ReferenceParam theSubject) { + return Collections.emptyList(); + } + + } + + RestfulServer rs = new RestfulServer(myCtx); + rs.setResourceProviders(new PatientResourceProvider(), new ObservationResourceProvider()); + + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + sc.setRestResourceRevIncludesEnabled(true); + rs.setServerConformanceProvider(sc); + + rs.init(createServletConfig()); + + CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); + + List resources = conformance.getRestFirstRep().getResource(); + CapabilityStatementRestResourceComponent patientResource = resources.stream() + .filter(resource -> "Patient".equals(resource.getType())) + .findFirst().get(); + assertThat(toStrings(patientResource.getSearchRevInclude()), containsInAnyOrder("Observation:foo", "Provenance:bar")); + } - @Description(shortDefinition = "This is a search for stuff!") - @Search - public List findDiagnosticReportsByPatient(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' 
+ Patient.SP_IDENTIFIER) TokenParam thePatientId, - @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam theDateRange, - @IncludeParam(allow = {"DiagnosticReport.result"}) Set theIncludes) throws Exception { - return null; - } + @Test + public void testRevIncludes_Inferred() throws Exception { - } + class PatientResourceProvider implements IResourceProvider { - @SuppressWarnings("unused") - public static class ReadProvider { + @Override + public Class getResourceType() { + return Patient.class; + } - @Search(type = Patient.class) - public Patient findPatient(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { - return null; - } + @Search + public List search(@IncludeParam(reverse = true) Set theRevIncludes) { + return Collections.emptyList(); + } + + } + + class ObservationResourceProvider implements IResourceProvider { + + @Override + public Class getResourceType() { + return Observation.class; + } + + @Search + public List search(@OptionalParam(name = "subject") ReferenceParam theSubject) { + return Collections.emptyList(); + } - @Read(version = false) - public Patient readPatient(@IdParam IdType theId) { - return null; - } + } + + RestfulServer rs = new RestfulServer(myCtx); + rs.setResourceProviders(new PatientResourceProvider(), new ObservationResourceProvider()); - } + ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs); + sc.setRestResourceRevIncludesEnabled(true); + rs.setServerConformanceProvider(sc); - @SuppressWarnings("unused") - public static class SearchProvider { + rs.init(createServletConfig()); - @Search(type = Patient.class) - public Patient findPatient1(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { - return null; - } + 
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs)); + ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance)); + + List resources = conformance.getRestFirstRep().getResource(); + CapabilityStatementRestResourceComponent patientResource = resources.stream() + .filter(resource -> "Patient".equals(resource.getType())) + .findFirst().get(); + assertThat(toStrings(patientResource.getSearchRevInclude()), containsInAnyOrder("Observation:subject")); + } - @Search(type = Patient.class) - public Patient findPatient2( - @Description(shortDefinition = "All patients linked to the given patient") @OptionalParam(name = "link", targetTypes = {Patient.class}) ReferenceAndListParam theLink) { - return null; - } + private List toOperationIdParts(List theOperation) { + ArrayList retVal = Lists.newArrayList(); + for (CapabilityStatementRestResourceOperationComponent next : theOperation) { + retVal.add(new IdType(next.getDefinition()).getIdPart()); + } + return retVal; + } + + private List toOperationNames(List theOperation) { + ArrayList retVal = Lists.newArrayList(); + for (CapabilityStatementRestResourceOperationComponent next : theOperation) { + retVal.add(next.getName()); + } + return retVal; + } + + private String validate(IBaseResource theResource) { + String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(theResource); + ourLog.info("Def:\n{}", conf); + + ValidationResult result = myValidator.validateWithResult(conf); + OperationOutcome operationOutcome = (OperationOutcome) result.toOperationOutcome(); + String outcome = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(operationOutcome); + ourLog.info("Outcome: {}", outcome); + + assertTrue(result.isSuccessful(), outcome); + List warningsAndErrors = operationOutcome + .getIssue() + .stream() + .filter(t -> t.getSeverity().ordinal() <= 
OperationOutcome.IssueSeverity.WARNING.ordinal()) // <= because this enum has a strange order + .collect(Collectors.toList()); + assertThat(outcome, warningsAndErrors, is(empty())); + + return myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(theResource); + } + + @SuppressWarnings("unused") + public static class ConditionalProvider implements IResourceProvider { + + @Create + public MethodOutcome create(@ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { + return null; + } + + @Delete + public MethodOutcome delete(@IdParam IdType theId, @ConditionalUrlParam(supportsMultiple = true) String theConditionalUrl) { + return null; + } + + @Override + public Class getResourceType() { + return Patient.class; + } + + @Update + public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { + return null; + } + + } + + @SuppressWarnings("unused") + public static class InstanceHistoryProvider implements IResourceProvider { + @Override + public Class getResourceType() { + return Patient.class; + } + + @History + public List history(@IdParam IdType theId) { + return null; + } + + } + + @SuppressWarnings("unused") + public static class MultiOptionalProvider { + + @Search(type = Patient.class) + public Patient findPatient(@Description(shortDefinition = "The patient's identifier") @OptionalParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier, + @Description(shortDefinition = "The patient's name") @OptionalParam(name = Patient.SP_NAME) StringParam theName) { + return null; + } + + } + + @SuppressWarnings("unused") + public static class MultiTypeEncounterProvider implements IResourceProvider { + + @Operation(name = "someOp") + public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, + @OperationParam(name = "someOpParam1") DateType theStart, @OperationParam(name = "someOpParam2") Encounter theEnd) { + return null; + } + 
+ @Override + public Class getResourceType() { + return Encounter.class; + } + + @Validate + public IBundleProvider validate(HttpServletRequest theServletRequest, @IdParam IdType theId, @ResourceParam Encounter thePatient) { + return null; + } + + } + + @SuppressWarnings("unused") + public static class MultiTypePatientProvider implements IResourceProvider { + + @Operation(name = "someOp") + public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, + @OperationParam(name = "someOpParam1") DateType theStart, @OperationParam(name = "someOpParam2") Patient theEnd) { + return null; + } + + @Override + public Class getResourceType() { + return Patient.class; + } + + @Validate + public IBundleProvider validate(HttpServletRequest theServletRequest, @IdParam IdType theId, @ResourceParam Patient thePatient) { + return null; + } + + } + + @SuppressWarnings("unused") + public static class NonConditionalProvider implements IResourceProvider { + + @Create + public MethodOutcome create(@ResourceParam Patient thePatient) { + return null; + } - } + @Delete + public MethodOutcome delete(@IdParam IdType theId) { + return null; + } + + @Override + public Class getResourceType() { + return Patient.class; + } - @SuppressWarnings("unused") - public static class SearchProviderWithWhitelist { + @Update + public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient) { + return null; + } - @Search(type = Patient.class) - public Patient findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION, chainWhitelist = {"foo", - "bar"}) ReferenceAndListParam theIdentifier) { - return null; - } + } - } + @SuppressWarnings("unused") + public static class PlainProviderWithExtendedOperationOnNoType { - @SuppressWarnings("unused") - public static class SearchProviderWithListNoType implements IResourceProvider { + @Operation(name = "plain", idempotent = true, 
returnParameters = {@OperationParam(min = 1, max = 2, name = "out1", type = StringType.class)}) + public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, @OperationParam(name = "start") DateType theStart, + @OperationParam(name = "end") DateType theEnd) { + return null; + } - @Override - public Class getResourceType() { - return Patient.class; - } + } + @SuppressWarnings("unused") + public static class ProviderWithExtendedOperationReturningBundle implements IResourceProvider { - @Search() - public List findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION) ReferenceAndListParam theIdentifier) { - return null; - } + @Operation(name = "everything", idempotent = true) + public IBundleProvider everything(HttpServletRequest theServletRequest, @IdParam IdType theId, @OperationParam(name = "start") DateType theStart, + @OperationParam(name = "end") DateType theEnd) { + return null; + } - } + @Override + public Class getResourceType() { + return Patient.class; + } - @SuppressWarnings("unused") - public static class SearchProviderWithListWithType implements IResourceProvider { + } - @Override - public Class getResourceType() { - return Patient.class; - } + @SuppressWarnings("unused") + public static class ProviderWithRequiredAndOptional { + @Description(shortDefinition = "This is a search for stuff!") + @Search + public List findDiagnosticReportsByPatient(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' 
+ Patient.SP_IDENTIFIER) TokenParam thePatientId, + @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam theDateRange, + @IncludeParam(allow = {"DiagnosticReport.result"}) Set theIncludes) throws Exception { + return null; + } - @Search(type = Patient.class) - public List findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION) ReferenceAndListParam theIdentifier) { - return null; - } + } - } + @SuppressWarnings("unused") + public static class ReadProvider { - public static class SystemHistoryProvider { + @Search(type = Patient.class) + public Patient findPatient(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { + return null; + } - @History - public List history() { - return null; - } + @Read(version = false) + public Patient readPatient(@IdParam IdType theId) { + return null; + } - } + } - public static class TypeHistoryProvider implements IResourceProvider { + @SuppressWarnings("unused") + public static class SearchProvider { - @Override - public Class getResourceType() { - return Patient.class; - } + @Search(type = Patient.class) + public Patient findPatient1(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { + return null; + } - @History - public List history() { - return null; - } + @Search(type = Patient.class) + public Patient findPatient2( + @Description(shortDefinition = "All patients linked to the given patient") @OptionalParam(name = "link", targetTypes = {Patient.class}) ReferenceAndListParam theLink) { + return null; + } - } + } - @SuppressWarnings("unused") - public static class VreadProvider { + @SuppressWarnings("unused") + public static class 
SearchProviderWithWhitelist { - @Search(type = Patient.class) - public Patient findPatient(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { - return null; - } + @Search(type = Patient.class) + public Patient findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION, chainWhitelist = {"foo", + "bar"}) ReferenceAndListParam theIdentifier) { + return null; + } - @Read(version = true) - public Patient readPatient(@IdParam IdType theId) { - return null; - } + } - } + @SuppressWarnings("unused") + public static class SearchProviderWithListNoType implements IResourceProvider { - public static class TypeLevelOperationProvider implements IResourceProvider { + @Override + public Class getResourceType() { + return Patient.class; + } - public static final String OPERATION_NAME = "op"; - @Operation(name = OPERATION_NAME, idempotent = true) - public IBundleProvider op() { - return null; - } + @Search() + public List findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION) ReferenceAndListParam theIdentifier) { + return null; + } - @Override - public Class getResourceType() { - return Patient.class; - } + } - } + @SuppressWarnings("unused") + public static class SearchProviderWithListWithType implements IResourceProvider { - public static class NamedQueryPlainProvider { + @Override + public Class getResourceType() { + return Patient.class; + } - public static final String QUERY_NAME = "testQuery"; - public static final String DESCRIPTION = "A query description"; - public static final String SP_QUANTITY = "quantity"; - @Search(queryName = QUERY_NAME) - @Description(formalDefinition = DESCRIPTION) - public Bundle findAllGivenParameter(@RequiredParam(name = SP_QUANTITY) QuantityParam quantity) { - return 
null; - } - } + @Search(type = Patient.class) + public List findPatient1(@Description(shortDefinition = "The organization at which this person is a patient") @RequiredParam(name = Patient.SP_ORGANIZATION) ReferenceAndListParam theIdentifier) { + return null; + } - public static class NamedQueryResourceProvider implements IResourceProvider { + } - public static final String QUERY_NAME = "testQuery"; - public static final String SP_PARAM = "param"; + public static class SystemHistoryProvider { - @Override - public Class getResourceType() { - return Patient.class; - } + @History + public List history() { + return null; + } - @Search(queryName = QUERY_NAME) - public Bundle findAllGivenParameter(@OptionalParam(name = SP_PARAM) StringParam param) { - return null; - } + } - } + public static class TypeHistoryProvider implements IResourceProvider { - public static class ProfiledPatientProvider implements IResourceProvider { + @Override + public Class getResourceType() { + return Patient.class; + } - @Override - public Class getResourceType() { - return PatientSubSub2.class; - } + @History + public List history() { + return null; + } - @Search - public List find() { - return null; - } - } + } - public static class MultipleProfilesPatientProvider implements IResourceProvider { + @SuppressWarnings("unused") + public static class VreadProvider { - @Override - public Class getResourceType() { - return PatientSubSub.class; - } + @Search(type = Patient.class) + public Patient findPatient(@Description(shortDefinition = "The patient's identifier (MRN or other card number)") @RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier) { + return null; + } - @Read(type = PatientTripleSub.class) - public PatientTripleSub read(@IdParam IdType theId) { - return null; - } + @Read(version = true) + public Patient readPatient(@IdParam IdType theId) { + return null; + } - } + } - @ResourceDef(id = PATIENT_SUB) - public static class PatientSub extends Patient { - } + public static 
class TypeLevelOperationProvider implements IResourceProvider { - @ResourceDef(id = PATIENT_SUB_SUB) - public static class PatientSubSub extends PatientSub { - } + public static final String OPERATION_NAME = "op"; - @ResourceDef(id = PATIENT_SUB_SUB_2) - public static class PatientSubSub2 extends PatientSub { - } + @Operation(name = OPERATION_NAME, idempotent = true) + public IBundleProvider op() { + return null; + } - @ResourceDef(id = PATIENT_TRIPLE_SUB) - public static class PatientTripleSub extends PatientSubSub { - } + @Override + public Class getResourceType() { + return Patient.class; + } - private static Set toStrings(Collection theType) { - HashSet retVal = new HashSet(); - for (IPrimitiveType next : theType) { - retVal.add(next.getValueAsString()); - } - return retVal; - } + } - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } + public static class NamedQueryPlainProvider { + + public static final String QUERY_NAME = "testQuery"; + public static final String DESCRIPTION = "A query description"; + public static final String SP_QUANTITY = "quantity"; + + @Search(queryName = QUERY_NAME) + @Description(formalDefinition = DESCRIPTION) + public Bundle findAllGivenParameter(@RequiredParam(name = SP_QUANTITY) QuantityParam quantity) { + return null; + } + } + + public static class NamedQueryResourceProvider implements IResourceProvider { + + public static final String QUERY_NAME = "testQuery"; + public static final String SP_PARAM = "param"; + + @Override + public Class getResourceType() { + return Patient.class; + } + + @Search(queryName = QUERY_NAME) + public Bundle findAllGivenParameter(@OptionalParam(name = SP_PARAM) StringParam param) { + return null; + } + + } + + public static class ProfiledPatientProvider implements IResourceProvider { + + @Override + public Class getResourceType() { + return PatientSubSub2.class; + } + + @Search + public List find() { + return null; + } + } + + public static 
class MultipleProfilesPatientProvider implements IResourceProvider { + + @Override + public Class getResourceType() { + return PatientSubSub.class; + } + + @Read(type = PatientTripleSub.class) + public PatientTripleSub read(@IdParam IdType theId) { + return null; + } + + } + + @ResourceDef(id = PATIENT_SUB) + public static class PatientSub extends Patient { + } + + @ResourceDef(id = PATIENT_SUB_SUB) + public static class PatientSubSub extends PatientSub { + } + + @ResourceDef(id = PATIENT_SUB_SUB_2) + public static class PatientSubSub2 extends PatientSub { + } + + @ResourceDef(id = PATIENT_TRIPLE_SUB) + public static class PatientTripleSub extends PatientSubSub { + } + + private static Set toStrings(Collection theType) { + HashSet retVal = new HashSet(); + for (IPrimitiveType next : theType) { + retVal.add(next.getValueAsString()); + } + return retVal; + } + + @AfterAll + public static void afterClassClearContext() { + TestUtil.clearAllStaticFieldsForUnitTest(); + } } From ec7cc8fbaa38ee76dddf6b89936aff304dc3fe66 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Thu, 8 Apr 2021 14:07:16 -0400 Subject: [PATCH 29/61] Fixed poorly formatted changelog entry. 
(#2531) --- ...ion-and-address-and-field-level-validation.yaml | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2449-interceptors-to-handle-standardization-normalization-and-address-and-field-level-validation.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2449-interceptors-to-handle-standardization-normalization-and-address-and-field-level-validation.yaml index 900e4266f9d..990467fdb74 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2449-interceptors-to-handle-standardization-normalization-and-address-and-field-level-validation.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2449-interceptors-to-handle-standardization-normalization-and-address-and-field-level-validation.yaml @@ -2,6 +2,14 @@ type: add issue: 2449 title: "Adds interceptors for the following functionality: -* Data normalization (n11n) - removing unwanted characters (control, etc. as defined by the requirements) -* Data standardization (s13n) - normalizing data by ensuring word spacing and character cases are uniform -* Data validation - making sure that addresses / emails are validated" +
      +  <ul>
      +    <li>
      +      Data normalization (n11n) - removing unwanted characters (control, etc. as defined by the requirements)
      +    </li>
      +    <li>
      +      Data standardization (s13n) - normalizing data by ensuring word spacing and character cases are uniform
      +    </li>
      +    <li>
      +      Data validation - making sure that addresses / emails are validated
      +    </li>
      +  </ul>
    " From 38436e5c1bd04e7a4ca81f078eb18ddabc2bd533 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Thu, 8 Apr 2021 14:57:20 -0400 Subject: [PATCH 30/61] Fix a typo in docs. (#2532) * Fixed poorly formatted changelog entry. * Fixed typo. --- .../hapi/fhir/docs/interceptors/built_in_server_interceptors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md index f5177609a67..0be0c864f14 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md @@ -299,7 +299,7 @@ The RepositoryValidatingInterceptor can be used to enforce validation rules on d # Data Standardization -`StandardizingInterceptor` handles data standardization (s13n) requirements. This interceptor applies standardization rules on all FHIR primitives as configured in the `s13n.json` file that should be made available on the classpath. This file contains FHIRPath definitions together with the standardizers that should be applied to that path. It comes with six per-build standardizers: NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE and TEXT. Custom standardizers can be developed by implementing `ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer` interface. +`StandardizingInterceptor` handles data standardization (s13n) requirements. This interceptor applies standardization rules on all FHIR primitives as configured in the `s13n.json` file that should be made available on the classpath. This file contains FHIRPath definitions together with the standardizers that should be applied to that path. It comes with six pre-built standardizers: NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE and TEXT. 
Custom standardizers can be developed by implementing `ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer` interface. A sample configuration file can be found below: From fa921a3cc9fb9933c9a5310c92b2bfa41386934b Mon Sep 17 00:00:00 2001 From: Eric Prud'hommeaux Date: Fri, 9 Apr 2021 11:59:26 +0200 Subject: [PATCH 31/61] ~ inspect instant for specific RDF datatype (#2473) --- .../main/java/ca/uhn/fhir/parser/RDFParser.java | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java index a221a4175d2..cad77bb1d02 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java @@ -300,7 +300,7 @@ public class RDFParser extends BaseParser { String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement); if (element != null) { - XSDDatatype dataType = getXSDDataTypeForFhirType(element.fhirType()); + XSDDatatype dataType = getXSDDataTypeForFhirType(element.fhirType(), encodedValue); rdfResource.addProperty(rdfModel.createProperty(propertyName), this.createFhirValueBlankNode(rdfModel, encodedValue, dataType, cardinalityIndex)); } } @@ -314,7 +314,7 @@ public class RDFParser extends BaseParser { if (value != null || !hasNoExtensions(pd)) { if (value != null) { String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement); - XSDDatatype dataType = getXSDDataTypeForFhirType(pd.fhirType()); + XSDDatatype dataType = getXSDDataTypeForFhirType(pd.fhirType(), value); Resource valueResource = this.createFhirValueBlankNode(rdfModel, value, dataType, cardinalityIndex); if (!hasNoExtensions(pd)) { IBaseHasExtensions hasExtension = (IBaseHasExtensions)pd; @@ -411,7 +411,7 @@ public class RDFParser extends BaseParser { * @param fhirType hapi field type * @return 
XSDDatatype value */ - private XSDDatatype getXSDDataTypeForFhirType(String fhirType) { + private XSDDatatype getXSDDataTypeForFhirType(String fhirType, String value) { switch (fhirType) { case "boolean": return XSDDatatype.XSDboolean; @@ -423,7 +423,16 @@ public class RDFParser extends BaseParser { return XSDDatatype.XSDdate; case "dateTime": case "instant": - return XSDDatatype.XSDdateTime; + switch (value.length()) { // assumes valid lexical value + case 4: + return XSDDatatype.XSDgYear; + case 7: + return XSDDatatype.XSDgYearMonth; + case 10: + return XSDDatatype.XSDdate; + default: + return XSDDatatype.XSDdateTime; + } case "code": case "string": default: From 82fc7b745781d78933465bc5a1a54fa14665b54e Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 9 Apr 2021 10:51:12 -0400 Subject: [PATCH 32/61] Fix typo --- .../ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md index 51180269ded..42302e38332 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_expansion.md @@ -5,7 +5,7 @@ Once you have MDM enabled, and you have many linked resources, it can be useful Patient/1 --> Patient/3 Patient/2 --> Patient/3 ``` -This indicates that both Patient/1 and Patient/2 are MDM-mathed to the same golden resource (Patient/3). +This indicates that both Patient/1 and Patient/2 are MDM-matched to the same golden resource (Patient/3). What if you want to get all observations from Patient/1, but also include any observations from all of their linked resources. 
You could do this by first querying the [$mdm-query-links](/docs/server_jpa_mdm/mdm_operations.html) endpoint, and then making a subsequent call like the following ```http request GET http://example.com:8000/Observation?subject=Patient/1,Patient/2,Patient/3 From 1bd585162d4484a9ef45817e3952ef1f38fe838d Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Fri, 9 Apr 2021 18:50:12 -0400 Subject: [PATCH 33/61] Fix typo in docs. (#2536) --- .../hapi/fhir/docs/interceptors/built_in_server_interceptors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md index 0be0c864f14..f21e80292ec 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/built_in_server_interceptors.md @@ -117,7 +117,7 @@ If you wish to override this behaviour and supply a static CapabilityStatement, * [StaticCapabilityStatementInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.html) * [StaticCapabilityStatementInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java) -The following example shows how to register the ExceptionHandlingInterceptor. +The following example shows how to register the StaticCapabilityStatementInterceptor. 
```java {{snippet:classpath:/ca/uhn/hapi/fhir/docs/ServletExamples.java|staticCapabilityStatementInterceptor}} From bc4266d3d2588df7e104e86cdfb30b985cf1cfb9 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Mon, 12 Apr 2021 06:00:31 -0400 Subject: [PATCH 34/61] Improve narrative templates for custom structures (#2537) * Improve narrative templates for custom structures * Add changelog * Test fix --- ...BaseRuntimeElementCompositeDefinition.java | 6 + .../java/ca/uhn/fhir/context/FhirContext.java | 6 +- .../fhir/model/api/annotation/Extension.java | 4 +- .../fhir/narrative2/INarrativeTemplate.java | 2 +- .../fhir/narrative2/NarrativeTemplate.java | 11 +- .../narrative2/NarrativeTemplateManifest.java | 98 ++++++++++------ .../java/ca/uhn/fhir/parser/BaseParser.java | 7 +- .../java/ca/uhn/fhir/parser/JsonParser.java | 3 +- .../uhn/fhir/narrative/OperationOutcome.html | 9 +- ...-improve_narrative_for_custom_structs.yaml | 5 + .../hapi/fhir/docs/model/custom_structures.md | 109 +++++++++++++++++- .../fhir/docs/model/narrative_generation.md | 28 +++-- .../docs/model/profiles_and_extensions.md | 103 +---------------- .../ca/uhn/fhir/narrative/CustomPatient.java | 29 +++++ ...stomThymeleafNarrativeGeneratorR4Test.java | 99 ++++++++++++++++ ...aultThymeleafNarrativeGeneratorR4Test.java | 23 ++-- .../narrative/FavouritePizzaExtension.java | 45 ++++++++ .../customtypes_CustomPatientR4.html | 6 + ...customtypes_FavouritePizzaExtensionR4.html | 10 ++ .../narrative/customtypes_r4.properties | 15 +++ .../standardtypes_PractitionerR4.html | 24 ++++ .../narrative/standardtypes_r4.properties | 19 +++ 22 files changed, 483 insertions(+), 178 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2537-improve_narrative_for_custom_structs.yaml create mode 100644 hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomPatient.java create mode 100644 
hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java create mode 100644 hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/FavouritePizzaExtension.java create mode 100644 hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_CustomPatientR4.html create mode 100644 hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_FavouritePizzaExtensionR4.html create mode 100644 hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_r4.properties create mode 100644 hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_PractitionerR4.html create mode 100644 hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_r4.properties diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java index 3a1d213f34b..c593c04387f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java @@ -38,6 +38,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBackboneElement; @@ -601,6 +602,11 @@ public abstract class BaseRuntimeElementCompositeDefinition ext public boolean isFirstFieldInNewClass() { return myFirstFieldInNewClass; } + + @Override + public String toString() { + return myField.getName(); + } } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java index 60aebe8b2c4..6a4a7d01695 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java +++ 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java @@ -902,21 +902,21 @@ public class FhirContext { } private BaseRuntimeElementDefinition scanDatatype(final Class theResourceType) { - ArrayList> resourceTypes = new ArrayList>(); + ArrayList> resourceTypes = new ArrayList<>(); resourceTypes.add(theResourceType); Map, BaseRuntimeElementDefinition> defs = scanResourceTypes(resourceTypes); return defs.get(theResourceType); } private RuntimeResourceDefinition scanResourceType(final Class theResourceType) { - ArrayList> resourceTypes = new ArrayList>(); + ArrayList> resourceTypes = new ArrayList<>(); resourceTypes.add(theResourceType); Map, BaseRuntimeElementDefinition> defs = scanResourceTypes(resourceTypes); return (RuntimeResourceDefinition) defs.get(theResourceType); } private synchronized Map, BaseRuntimeElementDefinition> scanResourceTypes(final Collection> theResourceTypes) { - List> typesToScan = new ArrayList>(); + List> typesToScan = new ArrayList<>(); if (theResourceTypes != null) { typesToScan.addAll(theResourceTypes); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java index 13e554bce5e..af91f6109a9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java @@ -49,13 +49,13 @@ public @interface Extension { * by regional authorities or jurisdictional governments) *

    */ - boolean definedLocally(); + boolean definedLocally() default true; /** * Returns true if this extension is a modifier extension */ - boolean isModifier(); + boolean isModifier() default false; /** * The URL associated with this extension diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java index a384de8a1b0..9794cd06032 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java @@ -32,7 +32,7 @@ public interface INarrativeTemplate { Set getAppliesToResourceTypes(); - Set> getAppliesToResourceClasses(); + Set> getAppliesToClasses(); TemplateTypeEnum getTemplateType(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java index e38b79bea74..f6367b580c7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java @@ -34,7 +34,7 @@ public class NarrativeTemplate implements INarrativeTemplate { private Set myAppliesToProfiles = new HashSet<>(); private Set myAppliesToResourceTypes = new HashSet<>(); private Set myAppliesToDataTypes = new HashSet<>(); - private Set> myAppliesToResourceClasses = new HashSet<>(); + private Set> myAppliesToClasses = new HashSet<>(); private TemplateTypeEnum myTemplateType = TemplateTypeEnum.THYMELEAF; private String myContextPath; private String myTemplateName; @@ -79,12 +79,12 @@ public class NarrativeTemplate implements INarrativeTemplate { } @Override - public Set> getAppliesToResourceClasses() { - return Collections.unmodifiableSet(myAppliesToResourceClasses); + public Set> getAppliesToClasses() { + return Collections.unmodifiableSet(myAppliesToClasses); } - void 
addAppliesToResourceClass(Class theAppliesToResourceClass) { - myAppliesToResourceClasses.add(theAppliesToResourceClass); + void addAppliesToClass(Class theAppliesToClass) { + myAppliesToClasses.add(theAppliesToClass); } @Override @@ -118,4 +118,5 @@ public class NarrativeTemplate implements INarrativeTemplate { void addAppliesToDatatype(String theDataType) { myAppliesToDataTypes.add(theDataType); } + } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java index b795ec80aa7..8ef69632c32 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.narrative2; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import com.google.common.base.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -32,8 +33,20 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.*; -import java.util.*; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -41,15 +54,17 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class NarrativeTemplateManifest implements 
INarrativeTemplateManifest { private static final Logger ourLog = LoggerFactory.getLogger(NarrativeTemplateManifest.class); - private final Map> myStyleToResourceTypeToTemplate; - private final Map> myStyleToDatatypeToTemplate; - private final Map> myStyleToNameToTemplate; + private final Map> myResourceTypeToTemplate; + private final Map> myDatatypeToTemplate; + private final Map> myNameToTemplate; + private final Map> myClassToTemplate; private final int myTemplateCount; private NarrativeTemplateManifest(Collection theTemplates) { Map> resourceTypeToTemplate = new HashMap<>(); Map> datatypeToTemplate = new HashMap<>(); Map> nameToTemplate = new HashMap<>(); + Map> classToTemplate = new HashMap<>(); for (NarrativeTemplate nextTemplate : theTemplates) { nameToTemplate.computeIfAbsent(nextTemplate.getTemplateName(), t -> new ArrayList<>()).add(nextTemplate); @@ -59,12 +74,16 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { for (String nextDataType : nextTemplate.getAppliesToDataTypes()) { datatypeToTemplate.computeIfAbsent(nextDataType.toUpperCase(), t -> new ArrayList<>()).add(nextTemplate); } + for (Class nextAppliesToClass : nextTemplate.getAppliesToClasses()) { + classToTemplate.computeIfAbsent(nextAppliesToClass.getName(), t -> new ArrayList<>()).add(nextTemplate); + } } myTemplateCount = theTemplates.size(); - myStyleToNameToTemplate = makeImmutable(nameToTemplate); - myStyleToResourceTypeToTemplate = makeImmutable(resourceTypeToTemplate); - myStyleToDatatypeToTemplate = makeImmutable(datatypeToTemplate); + myClassToTemplate = makeImmutable(classToTemplate); + myNameToTemplate = makeImmutable(nameToTemplate); + myResourceTypeToTemplate = makeImmutable(resourceTypeToTemplate); + myDatatypeToTemplate = makeImmutable(datatypeToTemplate); } public int getNamedTemplateCount() { @@ -73,23 +92,27 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { @Override public List 
getTemplateByResourceName(FhirContext theFhirContext, EnumSet theStyles, String theResourceName) { - return getFromMap(theStyles, theResourceName.toUpperCase(), myStyleToResourceTypeToTemplate); + return getFromMap(theStyles, theResourceName.toUpperCase(), myResourceTypeToTemplate); } @Override public List getTemplateByName(FhirContext theFhirContext, EnumSet theStyles, String theName) { - return getFromMap(theStyles, theName, myStyleToNameToTemplate); + return getFromMap(theStyles, theName, myNameToTemplate); } @Override public List getTemplateByElement(FhirContext theFhirContext, EnumSet theStyles, IBase theElement) { - if (theElement instanceof IBaseResource) { - String resourceName = theFhirContext.getResourceDefinition((IBaseResource) theElement).getName(); - return getTemplateByResourceName(theFhirContext, theStyles, resourceName); - } else { - String datatypeName = theFhirContext.getElementDefinition(theElement.getClass()).getName(); - return getFromMap(theStyles, datatypeName.toUpperCase(), myStyleToDatatypeToTemplate); + List retVal = getFromMap(theStyles, theElement.getClass().getName(), myClassToTemplate); + if (retVal.isEmpty()) { + if (theElement instanceof IBaseResource) { + String resourceName = theFhirContext.getResourceDefinition((IBaseResource) theElement).getName(); + retVal = getTemplateByResourceName(theFhirContext, theStyles, resourceName); + } else { + String datatypeName = theFhirContext.getElementDefinition(theElement.getClass()).getName(); + retVal = getFromMap(theStyles, datatypeName.toUpperCase(), myDatatypeToTemplate); + } } + return retVal; } public static NarrativeTemplateManifest forManifestFileLocation(String... 
thePropertyFilePaths) throws IOException { @@ -134,9 +157,16 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { NarrativeTemplate nextTemplate = nameToTemplate.computeIfAbsent(name, t -> new NarrativeTemplate().setTemplateName(name)); - Validate.isTrue(!nextKey.endsWith(".class"), "Narrative manifest does not support specifying templates by class name - Use \"[name].resourceType=[resourceType]\" instead"); - - if (nextKey.endsWith(".profile")) { + if (nextKey.endsWith(".class")) { + String className = file.getProperty(nextKey); + if (isNotBlank(className)) { + try { + nextTemplate.addAppliesToClass((Class) Class.forName(className)); + } catch (ClassNotFoundException theE) { + throw new InternalErrorException("Could not find class " + className + " declared in narative manifest"); + } + } + } else if (nextKey.endsWith(".profile")) { String profile = file.getProperty(nextKey); if (isNotBlank(profile)) { nextTemplate.addAppliesToProfile(profile); @@ -144,17 +174,17 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { } else if (nextKey.endsWith(".resourceType")) { String resourceType = file.getProperty(nextKey); Arrays - .stream(resourceType.split(",")) - .map(t -> t.trim()) - .filter(t -> isNotBlank(t)) - .forEach(t -> nextTemplate.addAppliesToResourceType(t)); + .stream(resourceType.split(",")) + .map(t -> t.trim()) + .filter(t -> isNotBlank(t)) + .forEach(t -> nextTemplate.addAppliesToResourceType(t)); } else if (nextKey.endsWith(".dataType")) { String dataType = file.getProperty(nextKey); Arrays - .stream(dataType.split(",")) - .map(t -> t.trim()) - .filter(t -> isNotBlank(t)) - .forEach(t -> nextTemplate.addAppliesToDatatype(t)); + .stream(dataType.split(",")) + .map(t -> t.trim()) + .filter(t -> isNotBlank(t)) + .forEach(t -> nextTemplate.addAppliesToDatatype(t)); } else if (nextKey.endsWith(".style")) { String templateTypeName = file.getProperty(nextKey).toUpperCase(); TemplateTypeEnum templateType = 
TemplateTypeEnum.valueOf(templateTypeName); @@ -171,9 +201,9 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { } else if (nextKey.endsWith(".title")) { ourLog.debug("Ignoring title property as narrative generator no longer generates titles: {}", nextKey); } else { - throw new ConfigurationException("Invalid property name: " + nextKey - + " - the key must end in one of the expected extensions " - + "'.profile', '.resourceType', '.dataType', '.style', '.contextPath', '.narrative', '.title'"); + throw new ConfigurationException("Invalid property name: " + nextKey + + " - the key must end in one of the expected extensions " + + "'.profile', '.resourceType', '.dataType', '.style', '.contextPath', '.narrative', '.title'"); } } @@ -210,10 +240,10 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { private static List getFromMap(EnumSet theStyles, T theKey, Map> theMap) { return theMap - .getOrDefault(theKey, Collections.emptyList()) - .stream() - .filter(t->theStyles.contains(t.getTemplateType())) - .collect(Collectors.toList()); + .getOrDefault(theKey, Collections.emptyList()) + .stream() + .filter(t -> theStyles.contains(t.getTemplateType())) + .collect(Collectors.toList()); } private static Map> makeImmutable(Map> theStyleToResourceTypeToTemplate) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java index 70259777a36..20976d44fdd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java @@ -980,7 +980,12 @@ public abstract class BaseParser implements IParser { myEncodeContext = theEncodeContext; } - private void addParent(CompositeChildElement theParent, StringBuilder theB) { + @Override + public String toString() { + return myDef.getElementName(); + } + + private void addParent(CompositeChildElement theParent, StringBuilder theB) 
{ if (theParent != null) { if (theParent.myResDef != null) { theB.append(theParent.myResDef.getName()); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java index 0a7c5df81d6..a57587e24e8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java @@ -379,7 +379,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } boolean haveWrittenExtensions = false; - for (CompositeChildElement nextChildElem : super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext)) { + Iterable compositeChildElements = super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext); + for (CompositeChildElement nextChildElem : compositeChildElements) { BaseRuntimeChildDefinition nextChild = nextChildElem.getDef(); diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/narrative/OperationOutcome.html b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/narrative/OperationOutcome.html index d38bf4cec78..a84249a298f 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/narrative/OperationOutcome.html +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/narrative/OperationOutcome.html @@ -11,14 +11,7 @@ - - -
    
    -					
    -					
    -						
    
    -					
    -				
    +				
    
     			
     		
     	
    diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2537-improve_narrative_for_custom_structs.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2537-improve_narrative_for_custom_structs.yaml new file mode 100644 index 00000000000..a55ad8a2a58 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2537-improve_narrative_for_custom_structs.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 2537 +title: "It is now possible t create narrative generator templates that apply to any + custom strucures including custom extension structures." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/custom_structures.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/custom_structures.md index 5f2633433c1..4fcf4e9855a 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/custom_structures.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/custom_structures.md @@ -7,12 +7,115 @@ This process is described on the [Profiles & Extensions](./profiles_and_exte There are situations however when you might want to create an entirely custom resource type. This feature should be used only if there is no other option, since it means you are creating a resource type that will not be interoperable with other FHIR implementations.

    -This is an advanced features and isn't needed for most uses of HAPI-FHIR. Feel free to skip this page. +This is an advanced feature and isn't needed for most uses of HAPI FHIR. Feel free to skip this page. For a simpler way of interacting with resource extensions, see Profiles & Extensions.

    - + +# Extending FHIR Resource Classes + +The most elegant way of adding extensions to a resource is through the use of custom fields. The following example shows a custom type which extends the FHIR Patient resource definition through two extensions. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatient.java|patientDef}} +``` + +Using this custom type is as simple as instantiating the type and working with the new fields. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientUse}} +``` + +This example produces the following output: + +```xml + + + + + + + + + + + + + + + + + +``` + +Parsing messages using your new custom type is equally simple. These types can also be used as method return types in clients and servers. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientParse}} +``` + +# Using Custom Types in a Client + +If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a build in HAPI type. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}} +``` + +You may also explicitly use custom types in searches and other operations which return resources. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch}} +``` + +You can also explicitly declare a preferred response resource custom type. This is useful for some operations that do not otherwise declare their resource types in the method signature. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch2}} +``` + +## Using Multiple Custom Types in a Client + +Sometimes you may not know in advance exactly which type you will be receiving. 
For example, there are Patient resources which conform to several different profiles on a server and you aren't sure which profile you will get back for a specific read, you can declare the "primary" type for a given profile. + +This is declared at the FhirContext level, and will apply to any clients created from this context (including clients created before the default was set). + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientDeclared}} +``` +# Using Custom Types in a Server + +If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a build in HAPI type. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}} +``` + +# Custom Composite Extension Classes + +The following example shows a resource containing a composite extension. + +```java +{{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java|resource}} +``` + +This could be used to create a resource such as the following: + +```xml + + + + + + + + + + + +``` + # Custom Resource Structure -The following example shows a custom resource structure class: +The following example shows a custom resource structure class creating an entirely new resource type as opposed to simply extending an existing one. Note that this is allowable in FHIR, but is **highly discouraged** as they are by definition not good for interoperability. 
```java {{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java|resource}} diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/narrative_generation.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/narrative_generation.md index bdabbff8fc6..36fa258c477 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/narrative_generation.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/narrative_generation.md @@ -63,22 +63,32 @@ Then create a properties file which describes your templates. In this properties The first (name.class) defines the class name of the resource to define a template for. The second (name.narrative) defines the path/classpath to the template file. The format of this path is `file:/path/foo.html` or `classpath:/com/classpath/foo.html`. ```properties -# Two property lines in the file per template +# Two property lines in the file per template. There are several forms you +# can use. This first form assigns a template type to a resource by +# resource name practitioner.resourceType=Practitioner -practitioner.narrative=file:src/test/resources/narrative/Practitioner.html +practitioner.narrative=classpath:com/example/narrative/Practitioner.html -observation.class=ca.uhn.fhir.model.dstu.resource.Observation -observation.narrative=file:src/test/resources/narrative/Observation.html +# This second form assigns a template by class name. This can be used for +# HAPI FHIR built-in structures, or for custom structures as well. +observation.class=org.hl7.fhir.r4.model.Observation +observation.narrative=classpath:com/example/narrative/Observation.html -# etc... +# You can also assign a template based on profile ID (Resource.meta.profile) +vitalsigns.profile=http://hl7.org/fhir/StructureDefinition/vitalsigns +vitalsigns.narrative=classpath:com/example/narrative/Observation_Vitals.html ``` -You may also override/define behaviour for datatypes. 
These datatype narrative definitions will be used as content within th:narrative blocks in resource templates. See the example resource template above for an example. +You may also override/define behaviour for datatypes and other structures. These datatype narrative definitions will be used as content within th:narrative blocks in resource templates. See the example resource template above for an example. ```properties -# datatypes use the same format as resources -humanname.resourceType=HumanNameDt -humanname.narrative=classpath:ca/uhn/fhir/narrative/HumanNameDt.html]]> +# You can create a template based on a type name +quantity.dataType=Quantity +quantity.narrative=classpath:com/example/narrative/Quantity.html + +# Or by class name, which can be useful for custom datatypes and structures +custom_extension.class=com.example.model.MyCustomExtension +custom_extension.narrative=classpath:com/example/narrative/CustomExtension.html ``` Finally, use the [CustomThymeleafNarrativeGenerator](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.html) and provide it to the FhirContext. diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/profiles_and_extensions.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/profiles_and_extensions.md index f0e956f4218..aa8fbd78830 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/profiles_and_extensions.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/profiles_and_extensions.md @@ -70,105 +70,8 @@ HAPI provides a few ways of accessing extension values in resources which are re {{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|parseExtension}} ``` -# Custom Resource Types +# Custom Resource Structures -The most elegant way of adding extensions to a resource is through the use of custom fields. 
The following example shows a custom type which extends the FHIR Patient resource definition through two extensions. +All of the examples on this page show how to work with the existing data model classes. -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatient.java|patientDef}} -``` - -Using this custom type is as simple as instantiating the type and working with the new fields. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientUse}} -``` - -This example produces the following output: - -```xml - - - - - - - - - - - - - - - - - -``` - -Parsing messages using your new custom type is equally simple. These types can also be used as method return types in clients and servers. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientParse}} -``` - -## Using Custom Types in a Client - -If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a build in HAPI type. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}} -``` - -You may also explicitly use custom types in searches and other operations which return resources. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch}} -``` - -You can also explicitly declare a preferred response resource custom type. This is useful for some operations that do not otherwise declare their resource types in the method signature. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch2}} -``` - -## Using Multiple Custom Types in a Client - -Sometimes you may not know in advance exactly which type you will be receiving. For example, there are Patient resources which conform to several different profiles on a server and you aren't sure which profile you will get back for a specific read, you can declare the "primary" type for a given profile. 
- -This is declared at the FhirContext level, and will apply to any clients created from this context (including clients created before the default was set). - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientDeclared}} -``` -## Using Custom Types in a Server - -If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a build in HAPI type. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}} -``` - -## Custom Type Examples: Composite Extensions - -The following example shows a resource containing a composite extension. - -```java -{{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java|resource}} -``` - -This could be used to create a resource such as the following: - -```xml - - - - - - - - - - - -``` +This is a great way to work with extensions, and most HAPI FHIR applications use the techniques described on this page. However, there is a more advanced technique available as well, involving the creation of custom Java classes that extend the built-in classes to add statically bound extensions (as oppoed to the dynamically bound ones shown on this page). See [Custom Structures](./custom_structures.html) for more information. 
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomPatient.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomPatient.java new file mode 100644 index 00000000000..81f54b917c9 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomPatient.java @@ -0,0 +1,29 @@ +package ca.uhn.fhir.narrative; + +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Extension; +import ca.uhn.fhir.model.api.annotation.ResourceDef; +import ca.uhn.fhir.util.ElementUtil; +import org.hl7.fhir.r4.model.Patient; + +@ResourceDef(profile = "http://custom_patient") +public class CustomPatient extends Patient { + + @Child(name = "favouritePizzaExtension") + @Extension(url = "http://example.com/favourite_pizza") + private FavouritePizzaExtension myFavouritePizza; + + public FavouritePizzaExtension getFavouritePizza() { + return myFavouritePizza; + } + + public void setFavouritePizza(FavouritePizzaExtension theFavouritePizza) { + myFavouritePizza = theFavouritePizza; + } + + @Override + public boolean isEmpty() { + return super.isEmpty() && ElementUtil.isEmpty(myFavouritePizza); + } + +} diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java new file mode 100644 index 00000000000..6b355c7332e --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java @@ -0,0 +1,99 @@ +package ca.uhn.fhir.narrative; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.util.TestUtil; +import org.hl7.fhir.r4.model.Practitioner; +import org.hl7.fhir.r4.model.Quantity; +import org.hl7.fhir.r4.model.StringType; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static 
org.hamcrest.Matchers.containsString; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class CustomThymeleafNarrativeGeneratorR4Test { + + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class); + + /** Don't use cached here since we modify the context */ + private FhirContext myCtx = FhirContext.forR4(); + + /** + * Implement narrative for standard type + */ + @Test + public void testStandardType() { + + CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); + + Practitioner p = new Practitioner(); + p.addIdentifier().setSystem("sys").setValue("val1"); + p.addIdentifier().setSystem("sys").setValue("val2"); + p.addAddress().addLine("line1").addLine("line2"); + p.addName().setFamily("fam1").addGiven("given"); + + gen.populateResourceNarrative(myCtx, p); + + String actual = p.getText().getDiv().getValueAsString(); + ourLog.info(actual); + + assertThat(actual, containsString("

    Name

    given FAM1

    Address

    line1
    line2
    ")); + + } + + @Test + public void testCustomType() { + + CustomPatient patient = new CustomPatient(); + patient.setActive(true); + FavouritePizzaExtension parentExtension = new FavouritePizzaExtension(); + parentExtension.setToppings(new StringType("Mushrooms, Onions")); + parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches")); + patient.setFavouritePizza(parentExtension); + + String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); + ourLog.info("Encoded: {}", output); + + String expectedEncoding = "{\n" + + " \"resourceType\": \"Patient\",\n" + + " \"meta\": {\n" + + " \"profile\": [ \"http://custom_patient\" ]\n" + + " },\n" + + " \"extension\": [ {\n" + + " \"url\": \"http://example.com/favourite_pizza\",\n" + + " \"extension\": [ {\n" + + " \"url\": \"toppings\",\n" + + " \"valueString\": \"Mushrooms, Onions\"\n" + + " }, {\n" + + " \"url\": \"size\",\n" + + " \"valueQuantity\": {\n" + + " \"value\": 14,\n" + + " \"unit\": \"Inches\",\n" + + " \"system\": \"http://unitsofmeasure\",\n" + + " \"code\": \"[in_i]\"\n" + + " }\n" + + " } ]\n" + + " } ],\n" + + " \"active\": true\n" + + "}"; + assertEquals(expectedEncoding, output); + + CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); + gen.populateResourceNarrative(myCtx, patient); + + String actual = patient.getText().getDiv().getValueAsString(); + ourLog.info(actual); + + String expected = "

    CustomPatient

    Favourite Pizza

    Toppings: Mushrooms, Onions Size: 14
    "; + assertEquals(expected, actual); + + } + + @AfterAll + public static void afterClassClearContext() { + TestUtil.clearAllStaticFieldsForUnitTest(); + } +} diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java index e06495865f6..433b448a448 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.TestUtil; import org.hamcrest.core.StringContains; @@ -21,7 +22,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorR4Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorR4Test.class); - private static FhirContext ourCtx = FhirContext.forR4(); + private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -29,7 +30,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { myGen = new DefaultThymeleafNarrativeGenerator(); myGen.setUseHapiServerConformanceNarrative(true); - ourCtx.setNarrativeGenerator(myGen); + myCtx.setNarrativeGenerator(myGen); } @Test @@ -44,7 +45,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { value.setBirthDate(new Date()); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("
    joe john BLOW
    ")); @@ -60,7 +61,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { value.addResult().setReference("Observation/2"); value.addResult().setReference("Observation/3"); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -82,13 +83,13 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { ""; //@formatter:on - OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse); + OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse); // String output = gen.generateTitle(oo); // ourLog.info(output); // assertEquals("Operation Outcome (2 issues)", output); - myGen.populateResourceNarrative(ourCtx, oo); + myGen.populateResourceNarrative(myCtx, oo); String output = oo.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -126,7 +127,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { value.addResult().setResource(obs); } - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -189,8 +190,8 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { " }"; - DiagnosticReport value = ourCtx.newJsonParser().parseResource(DiagnosticReport.class, input); - myGen.populateResourceNarrative(ourCtx, value); + DiagnosticReport value = myCtx.newJsonParser().parseResource(DiagnosticReport.class, input); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -210,7 +211,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { mp.setStatus(MedicationRequestStatus.ACTIVE); mp.setAuthoredOnElement(new DateTimeType("2014-09-01")); - myGen.populateResourceNarrative(ourCtx, mp); + myGen.populateResourceNarrative(myCtx, mp); String output = 
mp.getText().getDiv().getValueAsString(); assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output); @@ -223,7 +224,7 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { Medication med = new Medication(); med.getCode().setText("ciproflaxin"); - myGen.populateResourceNarrative(ourCtx, med); + myGen.populateResourceNarrative(myCtx, med); String output = med.getText().getDiv().getValueAsString(); assertThat(output, containsString("ciproflaxin")); diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/FavouritePizzaExtension.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/FavouritePizzaExtension.java new file mode 100644 index 00000000000..e0422b1f400 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/FavouritePizzaExtension.java @@ -0,0 +1,45 @@ +package ca.uhn.fhir.narrative; + +import ca.uhn.fhir.model.api.annotation.Block; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.util.ElementUtil; +import org.hl7.fhir.r4.model.BackboneElement; +import org.hl7.fhir.r4.model.Quantity; +import org.hl7.fhir.r4.model.StringType; + +@Block +public class FavouritePizzaExtension extends BackboneElement { + + @Child(name = "childBazExtension") + @ca.uhn.fhir.model.api.annotation.Extension(url = "toppings") + private StringType myToppings; + @Child(name = "childBarExtension") + @ca.uhn.fhir.model.api.annotation.Extension(url = "size") + private Quantity mySize; + + @Override + public BackboneElement copy() { + return null; + } + + @Override + public boolean isEmpty() { + return super.isEmpty() && ElementUtil.isEmpty(myToppings, mySize); + } + + public StringType getToppings() { + return myToppings; + } + + public void setToppings(StringType theToppings) { + myToppings = theToppings; + } + + public Quantity getSize() { + return mySize; + } + + public void setSize(Quantity theSize) { + mySize = theSize; + } +} diff --git 
a/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_CustomPatientR4.html b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_CustomPatientR4.html new file mode 100644 index 00000000000..de58b90390c --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_CustomPatientR4.html @@ -0,0 +1,6 @@ +
    +

    CustomPatient

    + +
    + +
    diff --git a/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_FavouritePizzaExtensionR4.html b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_FavouritePizzaExtensionR4.html new file mode 100644 index 00000000000..45a4ccaa596 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_FavouritePizzaExtensionR4.html @@ -0,0 +1,10 @@ +
    +

    Favourite Pizza

    + + Toppings: + + + Size: + + +
    diff --git a/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_r4.properties b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_r4.properties new file mode 100644 index 00000000000..9195f847039 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/resources/narrative/customtypes_r4.properties @@ -0,0 +1,15 @@ + +# Each resource to be defined has a pair or properties. +# +# The first (name.class) defines the class name of the +# resource to define a template for +# +# The second (name.narrative) defines the path/classpath to the +# template file. +# Format is file:/path/foo.html or classpath:/com/classpath/foo.html +# +custompatient.class=ca.uhn.fhir.narrative.CustomPatient +custompatient.narrative=classpath:narrative/customtypes_CustomPatientR4.html + +favourite_pizza.class=ca.uhn.fhir.narrative.FavouritePizzaExtension +favourite_pizza.narrative=classpath:narrative/customtypes_FavouritePizzaExtensionR4.html diff --git a/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_PractitionerR4.html b/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_PractitionerR4.html new file mode 100644 index 00000000000..dc0bcd9a62b --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_PractitionerR4.html @@ -0,0 +1,24 @@ +
    + +
    + + +

    Name

    +
    + +

    Address

    +
    + + +
    diff --git a/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_r4.properties b/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_r4.properties new file mode 100644 index 00000000000..35dc55b6996 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/resources/narrative/standardtypes_r4.properties @@ -0,0 +1,19 @@ + +# Each resource to be defined has a pair or properties. +# +# The first (name.class) defines the class name of the +# resource to define a template for +# +# The second (name.narrative) defines the path/classpath to the +# template file. +# Format is file:/path/foo.html or classpath:/com/classpath/foo.html +# +practitioner.resourceType=Practitioner +practitioner.narrative=classpath:narrative/standardtypes_PractitionerR4.html + +# You may also override/define behaviour for datatypes +humanname.dataType=HumanName +humanname.narrative=classpath:ca/uhn/fhir/narrative/datatype/HumanNameDt.html + +address.dataType=Address +address.narrative=classpath:ca/uhn/fhir/narrative/datatype/AddressDt.html From 3637dfc60f8c884db2048b7a48b601e01d398793 Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Mon, 12 Apr 2021 09:57:08 -0400 Subject: [PATCH 35/61] License headers --- .../MdmSearchExpandingInterceptor.java | 2 +- .../jpa/search/helper/SearchParamHelper.java | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java index 63f3d3574bc..4516e342a14 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/MdmSearchExpandingInterceptor.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.interceptor; /*- * #%L - * HAPI FHIR - Server Framework + * HAPI FHIR JPA Server * %% * Copyright 
(C) 2014 - 2021 Smile CDR, Inc. * %% diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java index 8b18d6faba8..b91e3b5d776 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/helper/SearchParamHelper.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.search.helper; +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; From 129df5dc1439efac52b0ba662a861070b3ad040b Mon Sep 17 00:00:00 2001 From: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Date: Tue, 13 Apr 2021 05:28:39 -0400 Subject: [PATCH 36/61] 2533 - Fix Issue with Reference Resources Not Being Returned In Search Queries (#2539) * 2533 - Initial commit to show how I might fix this Bug but not ready for merge at this time as it causes some other Unit Test failures in FhirResourceDaoR4VersionedReferenceTest. * Fix up test * 2533 - Cleaned up Unit Test to make it ready to merge. 
Co-authored-by: jamesagnew --- .../jpa/search/builder/SearchBuilder.java | 2 +- ...irResourceDaoR4VersionedReferenceTest.java | 47 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index fde8a54dc16..230bdefa531 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -643,8 +643,8 @@ public class SearchBuilder implements ISearchBuilder { */ if (resourcePidToVersion != null) { Long version = resourcePidToVersion.get(next.getResourceId()); + resourceId.setVersion(version); if (version != null && !version.equals(next.getVersion())) { - resourceId.setVersion(version); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); next = dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java index 7ee179f503e..bda53b10805 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java @@ -8,14 +8,21 @@ import ca.uhn.fhir.util.BundleBuilder; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.Condition; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; +import 
org.hl7.fhir.r4.model.Reference; +import org.hl7.fhir.r4.model.Task; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; +import java.util.Arrays; +import java.util.Date; +import java.util.HashSet; import java.util.List; +import java.util.stream.Collectors; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -397,6 +404,46 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test { } + @Test + public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { + HashSet refPaths = new HashSet(); + refPaths.add("Task.basedOn"); + myFhirCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); + myModelConfig.setRespectVersionsForSearchIncludes(true); + myFhirCtx.getParserOptions().setStripVersionsFromReferences(false); + + // Create a Condition + Condition condition = new Condition(); + IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + + // Create a Task which is basedOn that Condition + Task task = new Task(); + task.setBasedOn(Arrays.asList(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + + // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + assertEquals(2, outcome.size()); + List resources = outcome.getResources(0, 2); + assertEquals(2, resources.size(), resources.stream().map(t->t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))); + assertEquals(taskId.getValue(), resources.get(0).getIdElement().getValue()); + assertEquals(conditionId.getValue(), ((Task)resources.get(0)).getBasedOn().get(0).getReference()); + assertEquals(conditionId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + + // Now, update the 
Condition to generate another version of it + condition.setRecordedDate(new Date(System.currentTimeMillis())); + String conditionIdString = myConditionDao.update(condition).getId().getValue(); + + // Search for the Task again and make sure that we get the original version of the Condition resource in the Response + outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + assertEquals(2, outcome.size()); + resources = outcome.getResources(0, 2); + assertEquals(2, resources.size()); + assertEquals(taskId.getValue(), resources.get(0).getIdElement().getValue()); + assertEquals(conditionId.getValue(), ((Task)resources.get(0)).getBasedOn().get(0).getReference()); + assertEquals(conditionId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + } + @Test public void testSearchAndIncludeUnersionedReference_Asynchronous() { From 5a3f2e3edfcb6c0184d3a25ad36fab9951d898ef Mon Sep 17 00:00:00 2001 From: ianmarshall Date: Tue, 13 Apr 2021 22:22:06 -0400 Subject: [PATCH 37/61] Change package loader to not generate snapshot for logical StructureDefinition resources. 
--- .../jpa/packages/PackageInstallerSvcImpl.java | 10 ++++- .../ca/uhn/fhir/jpa/packages/NpmR4Test.java | 36 ++++++++++++++++++ .../test-logical-structuredefinition.tgz | Bin 0 -> 4711 bytes 3 files changed, 44 insertions(+), 2 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/test/resources/packages/test-logical-structuredefinition.tgz diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java index b8cfc721732..beb787dc32e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java @@ -404,9 +404,15 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { } private boolean isStructureDefinitionWithoutSnapshot(IBaseResource r) { + boolean retVal = false; FhirTerser terser = myFhirContext.newTerser(); - return r.getClass().getSimpleName().equals("StructureDefinition") && - terser.getSingleValueOrNull(r, "snapshot") == null; + if (r.getClass().getSimpleName().equals("StructureDefinition")) { + Optional kind = terser.getSinglePrimitiveValue(r, "kind"); + if (kind.isPresent() && !(kind.get().equals("logical"))) { + retVal = terser.getSingleValueOrNull(r, "snapshot") == null; + } + } + return retVal; } private IBaseResource generateSnapshot(IBaseResource sd) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java index 05940239896..ec088316919 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java @@ -69,7 +69,9 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.not; import static 
org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -734,6 +736,40 @@ public class NpmR4Test extends BaseJpaR4Test { }); } + @Test + public void testInstallPkgContainingLogicalStructureDefinition() throws Exception { + myDaoConfig.setAllowExternalReferences(true); + + byte[] bytes = loadClasspathBytes("/packages/test-logical-structuredefinition.tgz"); + myFakeNpmServlet.myResponses.put("/test-logical-structuredefinition/1.0.0", bytes); + + PackageInstallationSpec spec = new PackageInstallationSpec().setName("test-logical-structuredefinition").setVersion("1.0.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL); + PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec); + assertEquals(2, outcome.getResourcesInstalled().get("StructureDefinition")); + + // Be sure no further communication with the server + JettyUtil.closeServer(myServer); + + // Search for the installed resource + runInTransaction(() -> { + // Confirm that Laborbefund (a logical StructureDefinition) was created without a snapshot. 
+ SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/LogicalModel/Laborbefund")); + IBundleProvider result = myStructureDefinitionDao.search(map); + assertEquals(1, result.sizeOrThrowNpe()); + List resources = result.getResources(0,1); + assertFalse(((StructureDefinition)resources.get(0)).hasSnapshot()); + + // Confirm that DiagnosticLab (a resource StructureDefinition with differential but no snapshot) was created with a generated snapshot. + map = SearchParameterMap.newSynchronous(); + map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/DiagnosticReportLab")); + result = myStructureDefinitionDao.search(map); + assertEquals(1, result.sizeOrThrowNpe()); + resources = result.getResources(0,1); + assertTrue(((StructureDefinition)resources.get(0)).hasSnapshot()); + + }); + } static class FakeNpmServlet extends HttpServlet { diff --git a/hapi-fhir-jpaserver-base/src/test/resources/packages/test-logical-structuredefinition.tgz b/hapi-fhir-jpaserver-base/src/test/resources/packages/test-logical-structuredefinition.tgz new file mode 100644 index 0000000000000000000000000000000000000000..035b48ff77167ca3f2cd97e67a54a9ce7c2f8bd1 GIT binary patch literal 4711 zcmV-t5}55DiwFRQPIh1b1MNLsZyU*xly48Ww@H9~x!@4oLxVv8Zy=6_AEIpS9b(C{ zcXgI*pXJR4x9i&s$tF3=VGp-wMv}EK;D`K(ye)7r?)_f&DGvwe59A?FdB|_M>aY3c zd{81y+0%gBqL`r43)29lgEX z>Fl)h7SfxY)^@9g^d-{d7te7G3Z$3@nHtm?8PU?_Xme8^QB`>f2Mzn9VG# zedig}Ag)Pp?Yk{qztjA*t9OU3PVdgA_s7(p;!pSX?sT^A-)%J>w)BUM&iziivH#$H zv$40=>9#t}cBlEUeGNi+f67zi{Bvv!Fx^_7AIP(Rz1@x3e^=MLHFUlNS`y_wxBs?| zrmi`~|JLj_@3gyJ{Z313wzpvb?zEd9y>05Mvg{(QeruV1#$WqJJ2?5~#I@=5&j28gZl^PU{F^&s{5#D~x6|F>{=eC+;=h%YBJtnDnu!1S zgsdR$eLN3kDE>QFt*Ww~McIE42VTD)$g_VH{{{9B(?z?A|5i~V@t>}5xAnHx-08Nu z-MigR75}~S#h?F|Hv8w;T`H)z&sY&!q2ChFR+(m_9ze~EG{ 
zj8EdIN>!>-m8w*wD(hV~MD^iM-YywcsY+GWurU9CWPMN8uSLBf-`|q;2cb3}%Noi0 zo~&PsdPBn9lJy6&{!rE*%Noi0o~&PsS~{%_x%k+SJ#WYr%7$EpY#>?RE5&kEs`B2H zzZ2ERZ2o^xo4fw6N>!@z4wsD&4o(jCYyN^D2>^e;4}E@Jn;!(+63N1U4z)$Hz9;L~ zqOJmgs#K*aRatcj&i}|y+P98PuD1`i?k>+H)5)!pdsXDAf^w|92*ZmXqlH(~y7cRJgZ{ja3l@c94i zP{jYg`QRbX{#EgRxuWcUdDfq2|D9H+y%V>8cvji}D$4hDiXGddef<383}S$K%HY>G z>mIe>-Pmf01=P3?haH7urJ zTPfJLjzqk1Y!9#%)VYu0pCTOQjY?`$bi>?$3~xZ4o^Z61Uwdj!)ASA0{CuS zhBxrQ=s}1Bjq1BY2o&&U-4!BOjLlTmB82t~CoEOb5%e>*b~Kxg1gJxfB#dG|^unerhz(1?DMX$^fhp^o+C$S~?+cH5sQrsMES?C}Pi4)2 zr&tmX61?nkwF+tFsZFiRH{Q@3q1(jnj}ZjYV;2F0VWCsdw*edOO~l`Q5gT34aa=rx zpnMZu{sDA_isug4o@g+={BdLf!cV@YUlW1z_5(5y#B3tRwk%L3(d876DH`F+Hw=-B z&M*a$XrY!9n^JSc$N^G~{&;|=2t!yPh{Z@c0W_r89N`h}0V!u5VA(`dPAPJ!?Ow2E zoVYN_AO@)H>6EpRJr^2xUM)1@yyd*Heq;d{vlC|@Uw9)2f+TO6KJ@cIl_vZrHr<&= zCNPL#rHp$RQV{eMI}QfvJpf@5Xm1Q%WKPjjYJ=dwu)+DT;lkQTLwg|Lm+r(S7oeq- zo6jd8RCfwS%*Gec(gY#^185K^{OioX_l`C6B_@Oe`&997qEmq59swCdji2cN*=Hsl zFrpn$<^#qZf~n$+tp#018XzFx)4|nnz#eF%HFTzzZ!UiTtq!3#$p|nr+PM5c24|OV z#ugx9{_pGhXP1AnTyr!5(u5Sht}_AvE$Anp5*-7FCajV{a$G77qjJjmftQxy4MD9b zuug?roLv4$NKq%pV&UiJbU}^sTjy#O6{>Qf1|dhYQot+iS~uL23;De!|=rP1)XkvhsmiRv-5v|wr`9X=a}o@s%cmCHp3n8oBh;kMDh2xuN*7n>l=bugm?d*U&o zxTa>g=5)Xm(=muJ9guFQJsQaNe9q%CSquK7Mx4%i0!DGoUjWCT$pfy%JnrOPBQ=6# zS_$r^tp9eLj5ShQ)HP{6H8$Uq$a#Ic5Q5nl~jOcY^g#XZbNB}aZq zfpv`5M>eK2@HZ~DEL@1Ycnihhkv)OclEd8qXZ6wLj{zVuLLrOv_mn$5G59Pv5vz|a zWb}fi2=n=CB`zLg7_^|M0%#>Bwm5U~xy#}@CL{#{K}et-krnFJ7ULDMMujBdxD zLCy?cfbVA!91-YYvUD8?%C&usxt4vHYgH)y`Q;lJBb(*M!0iZC{ET|D8RoueVlkXB zF=I-42wOo?4C&)w?FSSKH)v`l+UAZC?5xQI5~j3SS(X;CQpb|yDu%G z!1yRwHAw7hkUE(e6e3;*jcy-3IedKot!4(2#21NL3bdM+45N{uUzboO$xv2$ECV`` zFrz8vd3)f|{um*o(>{0QV=elt^4e-`rnB61fjy z_16;^VByc)h{zan&l7evo9Nh_fQL8LSC|mT(hFg?zhrkHnS@Dc0Freiupqmz*5n~k z@_Q6Nom@113#fSF& zDC+#}|^ATpWFU58XplJ#)7pO*zI(K-Q+iDfEG?(Y<0^gl#C;N@bB`mIZ>DcP49#csJZWxQ1o z^MqgUfmfn|sQ3{kgnJfDK8a_4U@0!no9tr-tz3^XH3vHy`jYK&Irf=7wh*k)D0+r@ zKyx7jtDb5qPox+?t^j0#k?29qYR~pgo;My@2J_;fjllH^1ai`F*$sktXDYo>WJZtW 
zt!_*zF6ww-MjDsM|IBHtL^ZL=1e7Y%cbL^_ip1-i-pKV z1(j^35;lqFi85@9z zojp4Sy`LHuQ&`aGJ&Q$L>`>KbERkhlb^=YNb@+ib&~W%jJSuU<`ML0HIw1Uznm7i> zgSR-8JkdPD&WuwY9Ar*1CC(s<&}Ss3G_h;SXuiAstJpHh2P~8wF<5qT+vevT)v-ck;ep8o&$XN% zW+3PZHACwL6Bj<+x^P$)<3@Y_a<-tt%BFsL^W}_7IR;fMm9;YO7U$s!d=MkHs1za? z{29T2m?{%=+vY#)HLOk4!)8Lo2(rhZR{@q{`&=teZNz24*K}7CXB35!XXxow1R6rK zJ`y$9uYbg^Co+TfH}gZ!%{X*7>2p^)MX+pC&faNK^VOYk6`x8BIYFd z>6svU!SOTb*YCPQ6c=AXW;U9CG$TdK>$8pw^OSo!BNcew;f%5zFVj_r2`{|ul-X!~ zcb(>wop;G z2Ezj5c*R=*%isUi+SYff`+rtaZutFQfBScF|JOJF^I!kM>gxWla>c*@cWKt2XaCKv z-rkPee_Pip`(H&_gZsZyRIWbv>aTQQ*L2w|Uq3!<9QstChRlix)rPO}#`Uzz!|?eG z0u&y10M9@#hB(6OS*8OaNlXaG2_YPYg}^sCLwBOq55%n-!q~$3ZL#wi3ZK+QWM&U6 z!tvy4FMI`B3a-dqXnmV7B1~~hTHZZqacr6yx(_Y=XR!-`>-G$j`YX9)Xf80Cjo&ZK-;_T1=M_`Xg0c;+&Z!a9iy7i8(YbX+_Q zED>5jd-f}_mzuIlhT(+6ZJo9n?0WwCrdn$~JUlw$`MxU&my@nZvI_GWG$^7jr-tQ) z1SuI`mb!!q+y#rVF#T<7QPz|`h492RY6;Y8Qbc8m4%#UqhvqpRd_m0x)a?4X>PNPv zit;2|ML4=2GJz{nT1M0f@m{Itm|0|XtU#}&U&C?C5h*w$grQaM@vM(LEYVNNgH4(iXm7tHnK@m{ zuF4@S;2a}RZ#^^*krNoTpG7FnM8YllIIdsf02bf3KseK#yfK7^OPMi z7I!0uTW8(=eBp^k3*7=8y@dJNFBVT(LHMUcN~)jfI3CW-fhm9mJoF4xW>J~C9fny1 zwMN7Co!3bOdZF$Lr99_=m+^t@{{WfdtYu{gWC6s5$$9^8X9d$8Z3y008=>K4Sm? literal 0 HcmV?d00001 From 0cb95ad55632db7dbc034e394a683cd945b4f16a Mon Sep 17 00:00:00 2001 From: ianmarshall Date: Tue, 13 Apr 2021 22:30:22 -0400 Subject: [PATCH 38/61] Change package loader to not generate snapshot for logical StructureDefinition resources. 
--- ...not-create-snapshot-for-logical-structuredefinition.yaml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2535-do-not-create-snapshot-for-logical-structuredefinition.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2535-do-not-create-snapshot-for-logical-structuredefinition.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2535-do-not-create-snapshot-for-logical-structuredefinition.yaml new file mode 100644 index 00000000000..6f3e83a7a3b --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2535-do-not-create-snapshot-for-logical-structuredefinition.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 2535 +title: "An issue with package installer involving logical StructureDefinition resources was fixed. Package registry will no + longer attempt to generate a snapshot for logical StructureDefinition resources if one is not already provided in the + resource definition." From b45ddcc3da1f03cfb07d38bcdbac8b62fe7a0f71 Mon Sep 17 00:00:00 2001 From: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Date: Wed, 14 Apr 2021 08:45:49 -0400 Subject: [PATCH 39/61] 2543 - Fix issue where versionned references are not being returned properly. (#2544) * 2543 - Fix issue where versionned references are not being returned properly. * 2543 - Added changelog entries for this fix plus a previous fix for 2533. 
--- ...-not-being-returned-in-search-queries.yaml | 6 ++ ...ences-are-not-being-returned-properly.yaml | 8 ++ ...irResourceDaoR4VersionedReferenceTest.java | 78 +++++++++++++++++++ .../fhir/jpa/model/entity/ResourceLink.java | 4 + 4 files changed, 96 insertions(+) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml new file mode 100644 index 00000000000..fae2c3de9a6 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 2533 +title: "When issuing a request for a specific Resource and also specifying an _include param, +the referenced resource is not returned when there is only 1 version of the referenced resource available. +When there are more than 1 versions available, the referenced resource is returned in the response bundle." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml new file mode 100644 index 00000000000..aa180bb0423 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml @@ -0,0 +1,8 @@ +--- +type: fix +issue: 2543 +title: "When issuing a request for a specific Resource and also specifying an _include param, +the proper historical referenced resource is not returned when there are more than 1 versions of the +referenced resource available, after the reference has been changed from the original version 1 to some other version. +When there are more than 1 versions available, and the referring resource had previously referred to version 1 +but now refers to version 4, the resource returned in the response bundle is for version 1." 
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java index bda53b10805..ec689d1a673 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java @@ -444,6 +444,84 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test { assertEquals(conditionId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); } + @Test + public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { + HashSet refPaths = new HashSet(); + refPaths.add("Task.basedOn"); + myFhirCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); + myModelConfig.setRespectVersionsForSearchIncludes(true); + myFhirCtx.getParserOptions().setStripVersionsFromReferences(false); + + // Create a Condition + Condition condition = new Condition(); + IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + + // Now, update the Condition 3 times to generate a 4th version of it + condition.setRecordedDate(new Date(System.currentTimeMillis())); + conditionId = myConditionDao.update(condition).getId(); + condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); + conditionId = myConditionDao.update(condition).getId(); + condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); + conditionId = myConditionDao.update(condition).getId(); + + // Create a Task which is basedOn that Condition + Task task = new Task(); + task.setBasedOn(Arrays.asList(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + + // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the 
Response + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + assertEquals(2, outcome.size()); + List resources = outcome.getResources(0, 2); + assertEquals(2, resources.size(), resources.stream().map(t->t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))); + assertEquals(taskId.getValue(), resources.get(0).getIdElement().getValue()); + assertEquals(conditionId.getValue(), ((Task)resources.get(0)).getBasedOn().get(0).getReference()); + assertEquals(conditionId.withVersion("4").getValue(), resources.get(1).getIdElement().getValue()); + } + + @Test + public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersionOne() { + HashSet refPaths = new HashSet(); + refPaths.add("Task.basedOn"); + myFhirCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); + myModelConfig.setRespectVersionsForSearchIncludes(true); + myFhirCtx.getParserOptions().setStripVersionsFromReferences(false); + + // Create a Condition + Condition condition = new Condition(); + IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + ourLog.info("conditionId: \n{}", conditionId); + + // Create a Task which is basedOn that Condition + Task task = new Task(); + task.setBasedOn(Arrays.asList(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + + // Now, update the Condition 3 times to generate a 4th version of it + condition.setRecordedDate(new Date(System.currentTimeMillis())); + conditionId = myConditionDao.update(condition).getId(); + ourLog.info("UPDATED conditionId: \n{}", conditionId); + condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); + conditionId = myConditionDao.update(condition).getId(); + ourLog.info("UPDATED conditionId: \n{}", conditionId); + condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); + conditionId = 
myConditionDao.update(condition).getId(); + ourLog.info("UPDATED conditionId: \n{}", conditionId); + + // Now, update the Task to refer to the latest version 4 of the Condition + task.setBasedOn(Arrays.asList(new Reference(conditionId))); + taskId = myTaskDao.update(task).getId(); + ourLog.info("UPDATED taskId: \n{}", taskId); + + // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + assertEquals(2, outcome.size()); + List resources = outcome.getResources(0, 2); + assertEquals(2, resources.size(), resources.stream().map(t->t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))); + assertEquals(taskId.getValue(), resources.get(0).getIdElement().getValue()); + assertEquals(conditionId.getValue(), ((Task)resources.get(0)).getBasedOn().get(0).getReference()); + assertEquals(conditionId.withVersion("4").getValue(), resources.get(1).getIdElement().getValue()); + } @Test public void testSearchAndIncludeUnersionedReference_Asynchronous() { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java index 98cf2561bfd..581c0de16b0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java @@ -138,6 +138,7 @@ public class ResourceLink extends BaseResourceIndex { b.append(mySourceResource, obj.mySourceResource); b.append(myTargetResourceUrl, obj.myTargetResourceUrl); b.append(myTargetResourceType, obj.myTargetResourceType); + b.append(myTargetResourceVersion, obj.myTargetResourceVersion); b.append(getTargetResourceId(), obj.getTargetResourceId()); return b.isEquals(); } @@ -150,6 +151,7 @@ public class ResourceLink 
extends BaseResourceIndex { myTargetResourceId = source.getTargetResourceId(); myTargetResourcePid = source.getTargetResourcePid(); myTargetResourceType = source.getTargetResourceType(); + myTargetResourceVersion = source.getTargetResourceVersion(); myTargetResourceUrl = source.getTargetResourceUrl(); } @@ -244,6 +246,7 @@ public class ResourceLink extends BaseResourceIndex { b.append(mySourcePath); b.append(mySourceResource); b.append(myTargetResourceUrl); + b.append(myTargetResourceVersion); b.append(getTargetResourceType()); b.append(getTargetResourceId()); return b.toHashCode(); @@ -257,6 +260,7 @@ public class ResourceLink extends BaseResourceIndex { b.append(", src=").append(mySourceResourcePid); b.append(", target=").append(myTargetResourcePid); b.append(", targetType=").append(myTargetResourceType); + b.append(", targetVersion=").append(myTargetResourceVersion); b.append(", targetUrl=").append(myTargetResourceUrl); b.append("]"); From 550602b2f173efe8a8e33cb42322aacef7889658 Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Wed, 14 Apr 2021 13:15:30 -0400 Subject: [PATCH 40/61] added numeric matcher (#2547) * added numeric matcher * changelog * performance optimization * fix test --- .../fhir/context/phonetic/NumericEncoder.java | 18 +++++++++ .../context/phonetic/PhoneticEncoderEnum.java | 3 +- .../context/phonetic/PhoneticEncoderTest.java | 12 ++++-- .../5_4_0/2547-mdm-add-numeric-matcher.yaml | 5 +++ .../fhir/docs/server_jpa_mdm/mdm_rules.md | 12 +++++- ...esourceDaoDstu3PhoneticSearchNoFtTest.java | 32 +++++++++++++--- .../mdm/rules/matcher/MdmMatcherEnum.java | 3 +- .../mdm/rules/matcher/NumericMatcher.java | 16 ++++++++ .../rules/matcher/StringMatcherR4Test.java | 37 ++++++++++++------- 9 files changed, 110 insertions(+), 28 deletions(-) create mode 100644 hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2547-mdm-add-numeric-matcher.yaml 
create mode 100644 hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java new file mode 100644 index 00000000000..1619748d470 --- /dev/null +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java @@ -0,0 +1,18 @@ +package ca.uhn.fhir.context.phonetic; + +import com.google.common.base.CharMatcher; + +// Useful for numerical identifiers like phone numbers, address parts etc. +// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like that. +public class NumericEncoder implements IPhoneticEncoder { + @Override + public String name() { + return "NUMERIC"; + } + + @Override + public String encode(String theString) { + // Remove everything but the numbers + return CharMatcher.inRange('0', '9').retainFrom(theString); + } +} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderEnum.java index 28549a71629..605a8ae24ca 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderEnum.java @@ -39,7 +39,8 @@ public enum PhoneticEncoderEnum { METAPHONE(new ApacheEncoder("METAPHONE", new Metaphone())), NYSIIS(new ApacheEncoder("NYSIIS", new Nysiis())), REFINED_SOUNDEX(new ApacheEncoder("REFINED_SOUNDEX", new RefinedSoundex())), - SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex())); + SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex())), + NUMERIC(new NumericEncoder()); private final IPhoneticEncoder myPhoneticEncoder; diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderTest.java 
b/hapi-fhir-base/src/test/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderTest.java index bca150978cb..e43327eb818 100644 --- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderTest.java +++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/context/phonetic/PhoneticEncoderTest.java @@ -1,14 +1,14 @@ package ca.uhn.fhir.context.phonetic; -import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.jupiter.api.Assertions.assertEquals; class PhoneticEncoderTest { private static final Logger ourLog = LoggerFactory.getLogger(PhoneticEncoderTest.class); @@ -23,7 +23,11 @@ class PhoneticEncoderTest { public void testEncodeAddress(PhoneticEncoderEnum thePhoneticEncoderEnum) { String encoded = thePhoneticEncoderEnum.getPhoneticEncoder().encode(ADDRESS_LINE); ourLog.info("{}: {}", thePhoneticEncoderEnum.name(), encoded); - assertThat(encoded, startsWith(NUMBER + " ")); - assertThat(encoded, endsWith(" " + SUITE)); + if (thePhoneticEncoderEnum == PhoneticEncoderEnum.NUMERIC) { + assertEquals(NUMBER + SUITE, encoded); + } else { + assertThat(encoded, startsWith(NUMBER + " ")); + assertThat(encoded, endsWith(" " + SUITE)); + } } } diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2547-mdm-add-numeric-matcher.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2547-mdm-add-numeric-matcher.yaml new file mode 100644 index 00000000000..24aace91fdf --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2547-mdm-add-numeric-matcher.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 2547 +title: "Added new NUMERIC mdm matcher for matching 
phone numbers. Also added NUMERIC phonetic encoder to support +adding NUMERIC encoded search parameter (e.g. if searching for matching phone numbers is required by mdm candidate searching)." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_rules.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_rules.md index a4fea7f82d0..3075b67543d 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_rules.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_mdm/mdm_rules.md @@ -292,10 +292,10 @@ The following algorithms are currently supported: Gail = Gael, Gail != Gale, Thomas != Tom - CAVERPHONE1 + CAVERPHONE2 matcher - Apache Caverphone1 + Apache Caverphone2 Gail = Gael, Gail = Gale, Thomas != Tom @@ -379,6 +379,14 @@ The following algorithms are currently supported: 2019-12,Month = 2019-12-19,Day + + NUMERIC + matcher + + Remove all non-numeric characters from the string before comparing. 
+ + 4169671111 = (416) 967-1111 + NAME_ANY_ORDER matcher diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java index bda399dbd76..75b74c87aa8 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3PhoneticSearchNoFtTest.java @@ -1,12 +1,13 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.context.phonetic.ApacheEncoder; +import ca.uhn.fhir.context.phonetic.NumericEncoder; import ca.uhn.fhir.context.phonetic.PhoneticEncoderEnum; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.param.StringParam; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.HapiExtensions; import org.apache.commons.codec.language.Soundex; import org.hl7.fhir.dstu3.model.Enumerations; @@ -35,10 +36,14 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test public static final String GAIL = "Gail"; public static final String NAME_SOUNDEX_SP = "nameSoundex"; public static final String ADDRESS_LINE_SOUNDEX_SP = "addressLineSoundex"; + public static final String PHONE_NUMBER_SP = "phoneNumber"; private static final String BOB = "BOB"; private static final String ADDRESS = "123 Nohili St"; private static final String ADDRESS_CLOSE = "123 Nohily St"; private static final String ADDRESS_FAR = "123 College St"; + private static final String PHONE = "4169671111"; + private static final String PHONE_CLOSE = "(416) 967-1111"; + private static final String PHONE_FAR = "416 421 0421"; @Autowired 
ISearchParamRegistry mySearchParamRegistry; @@ -49,8 +54,9 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test myDaoConfig.setReuseCachedSearchResultsForMillis(null); myDaoConfig.setFetchSizeDefaultMaximum(new DaoConfig().getFetchSizeDefaultMaximum()); - createSoundexSearchParameter(NAME_SOUNDEX_SP, PhoneticEncoderEnum.SOUNDEX, "Patient.name"); - createSoundexSearchParameter(ADDRESS_LINE_SOUNDEX_SP, PhoneticEncoderEnum.SOUNDEX, "Patient.address.line"); + createPhoneticSearchParameter(NAME_SOUNDEX_SP, PhoneticEncoderEnum.SOUNDEX, "Patient.name"); + createPhoneticSearchParameter(ADDRESS_LINE_SOUNDEX_SP, PhoneticEncoderEnum.SOUNDEX, "Patient.address.line"); + createPhoneticSearchParameter(PHONE_NUMBER_SP, PhoneticEncoderEnum.NUMERIC, "Patient.telecom"); mySearchParamRegistry.forceRefresh(); mySearchParamRegistry.setPhoneticEncoder(new ApacheEncoder(PhoneticEncoderEnum.SOUNDEX.name(), new Soundex())); } @@ -70,6 +76,15 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test ourLog.info("Encoded address: {}", soundex.encode(ADDRESS)); } + @Test + public void testNumeric() { + NumericEncoder numeric = new NumericEncoder(); + assertEquals(PHONE, numeric.encode(PHONE_CLOSE)); + assertEquals(PHONE, numeric.encode(PHONE)); + assertEquals(numeric.encode(PHONE), numeric.encode(PHONE_CLOSE)); + assertNotEquals(numeric.encode(PHONE), numeric.encode(PHONE_FAR)); + } + @Test public void phoneticMatch() { Patient patient; @@ -77,15 +92,16 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test patient = new Patient(); patient.addName().addGiven(GALE); patient.addAddress().addLine(ADDRESS); + patient.addTelecom().setValue(PHONE); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient)); IIdType pId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); List stringParams = myResourceIndexedSearchParamStringDao.findAll(); - 
assertThat(stringParams, hasSize(6)); + assertThat(stringParams, hasSize(7)); List stringParamNames = stringParams.stream().map(ResourceIndexedSearchParamString::getParamName).collect(Collectors.toList()); - assertThat(stringParamNames, containsInAnyOrder(Patient.SP_NAME, Patient.SP_GIVEN, Patient.SP_PHONETIC, NAME_SOUNDEX_SP, Patient.SP_ADDRESS, ADDRESS_LINE_SOUNDEX_SP)); + assertThat(stringParamNames, containsInAnyOrder(Patient.SP_NAME, Patient.SP_GIVEN, Patient.SP_PHONETIC, NAME_SOUNDEX_SP, Patient.SP_ADDRESS, ADDRESS_LINE_SOUNDEX_SP, PHONE_NUMBER_SP)); assertSearchMatch(pId, Patient.SP_PHONETIC, GALE); assertSearchMatch(pId, Patient.SP_PHONETIC, GAIL); @@ -98,6 +114,10 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test assertSearchMatch(pId, ADDRESS_LINE_SOUNDEX_SP, ADDRESS); assertSearchMatch(pId, ADDRESS_LINE_SOUNDEX_SP, ADDRESS_CLOSE); assertNoMatch(ADDRESS_LINE_SOUNDEX_SP, ADDRESS_FAR); + + assertSearchMatch(pId, PHONE_NUMBER_SP, PHONE); + assertSearchMatch(pId, PHONE_NUMBER_SP, PHONE_CLOSE); + assertNoMatch(PHONE_NUMBER_SP, PHONE_FAR); } private void assertSearchMatch(IIdType thePId1, String theSp, String theValue) { @@ -114,7 +134,7 @@ public class FhirResourceDaoDstu3PhoneticSearchNoFtTest extends BaseJpaDstu3Test assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), hasSize(0)); } - private void createSoundexSearchParameter(String theCode, PhoneticEncoderEnum theEncoder, String theFhirPath) { + private void createPhoneticSearchParameter(String theCode, PhoneticEncoderEnum theEncoder, String theFhirPath) { SearchParameter searchParameter = new SearchParameter(); searchParameter.addBase("Patient"); searchParameter.setCode(theCode); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherEnum.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherEnum.java index 458387d14d3..f29dad1827c 100644 --- 
a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherEnum.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherEnum.java @@ -51,7 +51,8 @@ public enum MdmMatcherEnum { IDENTIFIER(new IdentifierMatcher()), EMPTY_FIELD(new EmptyFieldMatcher()), - EXTENSION_ANY_ORDER(new ExtensionMatcher()); + EXTENSION_ANY_ORDER(new ExtensionMatcher()), + NUMERIC(new HapiStringMatcher(new NumericMatcher())); private final IMdmFieldMatcher myMdmFieldMatcher; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java new file mode 100644 index 00000000000..82bce7d59c0 --- /dev/null +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java @@ -0,0 +1,16 @@ +package ca.uhn.fhir.mdm.rules.matcher; + +import ca.uhn.fhir.context.phonetic.NumericEncoder; + +// Useful for numerical identifiers like phone numbers, address parts etc. +// This should not be used where decimals are important. A new "quantity matcher" should be added to handle cases like that. 
+public class NumericMatcher implements IMdmStringMatcher { + private final NumericEncoder encoder = new NumericEncoder(); + + @Override + public boolean matches(String theLeftString, String theRightString) { + String left = encoder.encode(theLeftString); + String right = encoder.encode(theRightString); + return left.equals(right); + } +} diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/matcher/StringMatcherR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/matcher/StringMatcherR4Test.java index 42508046adc..73aafb7aebe 100644 --- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/matcher/StringMatcherR4Test.java +++ b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/rules/matcher/StringMatcherR4Test.java @@ -14,24 +14,33 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class StringMatcherR4Test extends BaseMatcherR4Test { private static final Logger ourLog = LoggerFactory.getLogger(StringMatcherR4Test.class); - public static final String LEFT = "namadega"; - public static final String RIGHT = "namaedga"; + public static final String LEFT_NAME = "namadega"; + public static final String RIGHT_NAME = "namaedga"; @Test public void testNamadega() { - assertTrue(match(MdmMatcherEnum.COLOGNE, LEFT, RIGHT)); - assertTrue(match(MdmMatcherEnum.DOUBLE_METAPHONE, LEFT, RIGHT)); - assertTrue(match(MdmMatcherEnum.MATCH_RATING_APPROACH, LEFT, RIGHT)); - assertTrue(match(MdmMatcherEnum.METAPHONE, LEFT, RIGHT)); - assertTrue(match(MdmMatcherEnum.SOUNDEX, LEFT, RIGHT)); - assertTrue(match(MdmMatcherEnum.METAPHONE, LEFT, RIGHT)); + String left = LEFT_NAME; + String right = RIGHT_NAME; + assertTrue(match(MdmMatcherEnum.COLOGNE, left, right)); + assertTrue(match(MdmMatcherEnum.DOUBLE_METAPHONE, left, right)); + assertTrue(match(MdmMatcherEnum.MATCH_RATING_APPROACH, left, right)); + assertTrue(match(MdmMatcherEnum.METAPHONE, left, right)); + assertTrue(match(MdmMatcherEnum.SOUNDEX, left, right)); + 
assertTrue(match(MdmMatcherEnum.METAPHONE, left, right)); - assertFalse(match(MdmMatcherEnum.CAVERPHONE1, LEFT, RIGHT)); - assertFalse(match(MdmMatcherEnum.CAVERPHONE2, LEFT, RIGHT)); - assertFalse(match(MdmMatcherEnum.NYSIIS, LEFT, RIGHT)); - assertFalse(match(MdmMatcherEnum.REFINED_SOUNDEX, LEFT, RIGHT)); - assertFalse(match(MdmMatcherEnum.STRING, LEFT, RIGHT)); - assertFalse(match(MdmMatcherEnum.SUBSTRING, LEFT, RIGHT)); + assertFalse(match(MdmMatcherEnum.CAVERPHONE1, left, right)); + assertFalse(match(MdmMatcherEnum.CAVERPHONE2, left, right)); + assertFalse(match(MdmMatcherEnum.NYSIIS, left, right)); + assertFalse(match(MdmMatcherEnum.REFINED_SOUNDEX, left, right)); + assertFalse(match(MdmMatcherEnum.STRING, left, right)); + assertFalse(match(MdmMatcherEnum.SUBSTRING, left, right)); + } + + @Test + public void testNumeric() { + assertTrue(match(MdmMatcherEnum.NUMERIC, "4169671111", "(416) 967-1111")); + assertFalse(match(MdmMatcherEnum.NUMERIC, "5169671111", "(416) 967-1111")); + assertFalse(match(MdmMatcherEnum.NUMERIC, "4169671111", "(416) 967-1111x123")); } @Test From 2ebc57b8a27ed0a13d6eead50d0b267c6d9fe2ee Mon Sep 17 00:00:00 2001 From: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Date: Wed, 14 Apr 2021 13:33:07 -0400 Subject: [PATCH 41/61] 2548 - Add backport changelog Tag to backported fixes for #2533 and #2543. 
(#2549) --- ...reference-resources-not-being-returned-in-search-queries.yaml | 1 + ...re-versionned-references-are-not-being-returned-properly.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml index fae2c3de9a6..5172ef0dbf5 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml @@ -4,3 +4,4 @@ issue: 2533 title: "When issuing a request for a specific Resource and also specifying an _include param, the referenced resource is not returned when there is only 1 version of the referenced resource available. When there are more than 1 versions available, the referenced resource is returned in the response bundle." 
+backport: 5.3.2 diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml index aa180bb0423..e9f7e2cfa7d 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml @@ -6,3 +6,4 @@ the proper historical referenced resource is not returned when there are more th referenced resource available, after the reference has been changed from the original version 1 to some other version. When there are more than 1 versions available, and the referring resource had previously referred to version 1 but now refers to version 4, the resource returned in the response bundle is for version 1." 
+backport: 5.3.2 From ca2088f3adf7c985af7b14aeff402efc75ba8aab Mon Sep 17 00:00:00 2001 From: James Agnew Date: Wed, 14 Apr 2021 17:41:32 -0400 Subject: [PATCH 42/61] Add framework for Bulk Import (#2538) * Start work on bul;k import * Work on bulk import * Have batch working * Working * Working * More work * More work on bulk export * Address fixmes * License header updates * Test fixes * License header updates * Test fix * Test fix * Version bumps * Work on config * Test cleanup * One more version bump * Version bump * CLeanup * A few additions * Test fixes * Test fix * Test fix * Migration fix * Test fix * Test fix --- .editorconfig | 1 + hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../java/ca/uhn/fhir/util/BundleBuilder.java | 3 +- .../ca/uhn/fhir/i18n/hapi-messages.properties | 4 +- hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 8 +- .../hapi/fhir/changelog/5_4_0/changes.yaml | 25 + hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jaxrsserver-example/pom.xml | 2 +- hapi-fhir-jpaserver-api/pom.xml | 2 +- .../uhn/fhir/jpa/api/dao/IFhirSystemDao.java | 12 + hapi-fhir-jpaserver-base/pom.xml | 2 +- .../uhn/fhir/jpa/batch/BatchJobsConfig.java | 9 +- .../GoldenResourceAnnotatingProcessor.java | 2 +- .../api/BulkDataExportOptions.java | 2 +- .../{ => export}/api/IBulkDataExportSvc.java | 10 +- .../{ => export}/job/BaseBulkItemReader.java | 5 +- .../BulkExportCreateEntityStepListener.java | 12 +- ...portGenerateResourceFilesStepListener.java | 8 +- .../{ => export}/job/BulkExportJobCloser.java | 10 +- .../{ => export}/job/BulkExportJobConfig.java | 31 +- 
.../job/BulkExportJobParameterValidator.java | 3 +- .../job/BulkExportJobParametersBuilder.java | 4 +- .../bulk/{ => export}/job/BulkItemReader.java | 5 +- .../job/CreateBulkExportEntityTasklet.java | 8 +- .../GroupBulkExportJobParametersBuilder.java | 2 +- .../{ => export}/job/GroupBulkItemReader.java | 5 +- .../job/GroupIdPresentValidator.java | 4 +- .../job/PatientBulkItemReader.java | 4 +- .../job/ResourceToFileWriter.java | 4 +- .../job/ResourceTypePartitioner.java | 4 +- .../model/BulkExportJobStatusEnum.java} | 11 +- .../model/BulkExportResponseJson.java | 2 +- .../provider/BulkDataExportProvider.java | 8 +- .../svc/BulkDataExportSvcImpl.java | 31 +- .../svc/BulkExportCollectionFileDaoSvc.java | 2 +- .../{ => export}/svc/BulkExportDaoSvc.java | 10 +- .../bulk/imprt/api/IBulkDataImportSvc.java | 93 +++ .../ActivateBulkImportEntityStepListener.java | 51 ++ .../bulk/imprt/job/BulkImportFileReader.java | 76 ++ .../bulk/imprt/job/BulkImportFileWriter.java | 74 ++ .../bulk/imprt/job/BulkImportJobCloser.java | 57 ++ .../bulk/imprt/job/BulkImportJobConfig.java | 169 +++++ .../job/BulkImportJobParameterValidator.java | 70 ++ .../bulk/imprt/job/BulkImportPartitioner.java | 72 ++ ...BulkImportProcessStepCompletionPolicy.java | 41 ++ .../imprt/job/BulkImportStepListener.java | 63 ++ .../job/CreateBulkImportEntityTasklet.java | 45 ++ .../imprt/model/BulkImportJobFileJson.java | 51 ++ .../bulk/imprt/model/BulkImportJobJson.java | 72 ++ .../imprt/model/BulkImportJobStatusEnum.java | 34 + .../model/JobFileRowProcessingModeEnum.java | 34 + .../imprt/model/ParsedBulkImportRecord.java | 46 ++ .../bulk/imprt/svc/BulkDataImportSvcImpl.java | 280 ++++++++ .../ca/uhn/fhir/jpa/config/BaseConfig.java | 50 +- .../uhn/fhir/jpa/config/BaseDstu2Config.java | 9 + .../jpa/config/dstu3/BaseDstu3Config.java | 5 - .../uhn/fhir/jpa/config/r4/BaseR4Config.java | 5 - .../uhn/fhir/jpa/config/r5/BaseR5Config.java | 5 - .../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 32 +- 
.../jpa/dao/BaseTransactionProcessor.java | 32 +- .../uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java | 661 ------------------ ...ansactionProcessorVersionAdapterDstu2.java | 171 +++++ .../fhir/jpa/dao/data/IBulkExportJobDao.java | 6 +- .../fhir/jpa/dao/data/IBulkImportJobDao.java | 40 ++ .../jpa/dao/data/IBulkImportJobFileDao.java | 43 ++ .../jpa/dao/dstu3/FhirSystemDaoDstu3.java | 16 - .../dao/expunge/ExpungeEverythingService.java | 4 + .../uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java | 28 - .../uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java | 22 - .../fhir/jpa/entity/BulkExportJobEntity.java | 12 +- .../fhir/jpa/entity/BulkImportJobEntity.java | 157 +++++ .../jpa/entity/BulkImportJobFileEntity.java | 104 +++ .../uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java | 58 ++ .../jpa/bulk/BulkDataExportProviderTest.java | 16 +- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 130 ++-- .../bulk/imprt/svc/BulkDataImportR4Test.java | 155 ++++ .../imprt/svc/BulkDataImportSvcImplTest.java | 145 ++++ .../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 2 +- .../jpa/dao/TransactionProcessorTest.java | 2 +- .../fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java | 2 +- .../jpa/dao/dstu2/FhirSystemDaoDstu2Test.java | 8 +- .../fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java | 2 +- .../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 12 +- ...esourceDaoR4SearchWithElasticSearchIT.java | 2 +- ...urceDaoR4SearchWithLuceneDisabledTest.java | 2 +- ...sourceDaoR4TerminologyElasticsearchIT.java | 2 +- .../fhir/jpa/dao/r4/FhirSystemDaoR4Test.java | 61 +- .../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 2 +- .../ValueSetExpansionR4ElasticsearchIT.java | 2 +- hapi-fhir-jpaserver-batch/pom.xml | 2 +- .../ca/uhn/fhir/jpa/batch/BatchConstants.java | 32 + .../config/NonPersistedBatchConfigurer.java | 3 +- hapi-fhir-jpaserver-cql/pom.xml | 6 +- hapi-fhir-jpaserver-mdm/pom.xml | 6 +- hapi-fhir-jpaserver-migrate/pom.xml | 2 +- .../tasks/HapiFhirJpaMigrationTasks.java | 27 + hapi-fhir-jpaserver-model/pom.xml | 2 +- 
hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 4 +- .../ca/uhn/fhirtest/TestRestfulServer.java | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- ...eThymeleafNarrativeGeneratorDstu2Test.java | 86 ++- ...mThymeleafNarrativeGeneratorDstu2Test.java | 17 +- ...tThymeleafNarrativeGeneratorDstu2Test.java | 28 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- ...tThymeleafNarrativeGeneratorDstu3Test.java | 32 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- ...stomThymeleafNarrativeGeneratorR4Test.java | 135 ++-- ...aultThymeleafNarrativeGeneratorR4Test.java | 8 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- .../fhir/test/utilities/ITestDataBuilder.java | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 16 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 80 +-- restful-server-example/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 151 files changed, 2891 insertions(+), 1279 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/api/BulkDataExportOptions.java (98%) rename 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/api/IBulkDataExportSvc.java (91%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BaseBulkItemReader.java (98%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportCreateEntityStepListener.java (78%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportGenerateResourceFilesStepListener.java (88%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportJobCloser.java (83%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportJobConfig.java (88%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportJobParameterValidator.java (98%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkExportJobParametersBuilder.java (95%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/BulkItemReader.java (94%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/CreateBulkExportEntityTasklet.java (93%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/GroupBulkExportJobParametersBuilder.java (96%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/GroupBulkItemReader.java (98%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/GroupIdPresentValidator.java (93%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/PatientBulkItemReader.java (97%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/ResourceToFileWriter.java (97%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/job/ResourceTypePartitioner.java (96%) rename 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{model/BulkJobStatusEnum.java => export/model/BulkExportJobStatusEnum.java} (77%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/model/BulkExportResponseJson.java (98%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/provider/BulkDataExportProvider.java (98%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/svc/BulkDataExportSvcImpl.java (95%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/svc/BulkExportCollectionFileDaoSvc.java (96%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/{ => export}/svc/BulkExportDaoSvc.java (90%) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java create mode 100644 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ParsedBulkImportRecord.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java create mode 100644 hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java diff --git a/.editorconfig b/.editorconfig index f19de7e2a01..479bb985c23 100644 --- a/.editorconfig +++ b/.editorconfig @@ -31,6 +31,7 @@ charset = utf-8 indent_style = tab tab_width = 3 
indent_size = 3 +continuation_indent_size=3 ij_java_align_consecutive_assignments = false ij_java_align_consecutive_variable_declarations = false ij_java_align_group_field_declarations = false diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 0bee4719670..2925cddee83 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 9eb400b46c0..60f23bd7bcc 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index ac1fd654c29..00ce73745e2 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java index ca79ffb009b..7e3c568663e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java @@ -156,7 +156,8 @@ public class BundleBuilder { // Bundle.entry.request.url IPrimitiveType url = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); - url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue()); + String resourceType = myContext.getResourceType(theResource); + url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue()); myEntryRequestUrlChild.getMutator().setValue(request, url); // Bundle.entry.request.url diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties 
b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index 26c705f6f74..e1e02ff0e12 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri # JPA Messages -ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export -ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0} +ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export +ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0} ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request. ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. 
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 6d6a435eddf..d3c726265b4 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -3,14 +3,14 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT pom HAPI FHIR BOM ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index b9a5e93b0f6..2816ae6159e 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index d7306fba4a9..7d9dc89fe6c 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 4d1fd59e786..1055a8ed4ea 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index ac83c296578..8784078d56b 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 78ec60b5d68..cef760eeff8 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT 
../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 0783c0ea778..e7ac14275c6 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index ce5eca39f05..2ec2f22795c 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 837b2992aee..24b559ea001 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 58d067d2797..40519292d48 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -78,13 +78,13 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT compile ca.uhn.hapi.fhir hapi-fhir-jpaserver-subscription - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT compile @@ -101,7 +101,7 @@ ca.uhn.hapi.fhir hapi-fhir-testpage-overlay - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT classes diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml new file mode 100644 index 00000000000..c973340bfd7 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml @@ -0,0 +1,25 @@ +--- +- item: + type: "add" + title: "The version of a few dependencies have been bumped to the latest versions +(dependent HAPI modules listed in 
brackets): +
      +
    • Commons-Lang3 (Core): 3.9 -> 3.12.0
    • +
    • Commons-Text (Core): 1.7 -> 1.9
    • +
    • Commons-Codec (Core): 1.14 -> 1.15
    • +
    • Commons-IO (Core): 2.6 -> 2.8.0
    • +
    • Guava (Core): 30.1-jre -> 30.1.1-jre
    • +
    • Jackson (Core): 2.12.1 -> 2.12.3
    • +
    • Woodstox (Core): 6.2.3 -> 6.2.5
    • +
    • Gson (JPA): 2.8.5 -> 2.8.6
    • +
    • Caffeine (JPA): 2.7.0 -> 3.0.1
    • +
    • Hibernate (JPA): 5.4.26.Final -> 5.4.30.Final
    • +
    • Hibernate Search (JPA): 6.0.0.Final -> 6.0.2.Final
    • +
    • Spring (JPA): 5.3.3 -> 5.3.6
    • +
    • Spring Batch (JPA): 4.2.3.RELEASE -> 4.3.2
    • +
    • Spring Data (JPA): 2.4.2 -> 2.4.7
    • +
    • Commons DBCP2 (JPA): 2.7.0 -> 2.8.0
    • +
    • Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE
    • +
    • JAnsi (CLI): 2.1.1 -> 2.3.2
    • +
    +" diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index bddf4b9e81b..43dd8b85ed7 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 4d6434eab74..073ecc3dbf0 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml index 627009d073a..cce18f1d9c1 100644 --- a/hapi-fhir-jaxrsserver-example/pom.xml +++ b/hapi-fhir-jaxrsserver-example/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index a1b87034a93..ac534aaccba 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java index 2d810788d76..00b2ffb8027 100644 --- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java +++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java @@ -67,6 +67,18 @@ public interface IFhirSystemDao extends IDao { */ IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage); + /** + * Executes a FHIR transaction using a new database transaction. This method must + * not be called from within a DB transaction. 
+ */ T transaction(RequestDetails theRequestDetails, T theResources); + /** + * Executes a FHIR transaction nested inside the current database transaction. + * This form of the transaction processor can handle write operations only (no reads) + */ + default T transactionNested(RequestDetails theRequestDetails, T theResources) { + throw new UnsupportedOperationException(); + } + } diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 7b0116aad18..eedfef43910 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java index 178ee7358d5..fc4c03c7b34 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java @@ -20,17 +20,20 @@ package ca.uhn.fhir.jpa.batch; * #L% */ -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @Configuration //When you define a new batch job, add it here. 
@Import({ - CommonBatchJobConfig.class, - BulkExportJobConfig.class + CommonBatchJobConfig.class, + BulkExportJobConfig.class, + BulkImportJobConfig.class }) public class BatchJobsConfig { + public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob"; public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob"; public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob"; public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob"; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java index 4566aa83af3..46c418a1e74 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.jpa.batch.log.Logs; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.util.ExtensionUtil; import ca.uhn.fhir.util.HapiExtensions; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java index c63a0df0546..4f50d6fed97 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.api; +package ca.uhn.fhir.jpa.bulk.export.api; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java similarity index 91% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java index af39667b19b..bbd1d1a6628 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.api; +package ca.uhn.fhir.jpa.bulk.export.api; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import org.hl7.fhir.instance.model.api.IIdType; import javax.transaction.Transactional; @@ -50,7 +50,7 @@ public interface IBulkDataExportSvc { class JobInfo { private String myJobId; - private BulkJobStatusEnum myStatus; + private BulkExportJobStatusEnum myStatus; private List myFiles; private String myRequest; private Date myStatusTime; @@ -90,11 +90,11 @@ public interface IBulkDataExportSvc { } - public BulkJobStatusEnum getStatus() { + public BulkExportJobStatusEnum getStatus() { return myStatus; } - public JobInfo setStatus(BulkJobStatusEnum theStatus) { + public JobInfo setStatus(BulkExportJobStatusEnum theStatus) { myStatus = theStatus; return this; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java index 9ab5e56a75d..7f934cfb248 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; -import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader getResourcePidIterator(); + protected abstract Iterator getResourcePidIterator(); protected List createSearchParameterMapsForResourceType() { BulkExportJobEntity jobEntity = getJobEntity(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java similarity index 78% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java index 96b25dc4073..8da195c344c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; -import org.springframework.batch.core.BatchStatus; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecutionListener; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * Will run before and after a job to set the status to whatever is appropriate. @@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener public void beforeStep(StepExecution theStepExecution) { String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID"); if (jobUuid != null) { - myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING); + myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java similarity index 88% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java index cbd7e651762..699055e6404 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecutionListener; @@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio } assert isNotBlank(jobUuid); String exitDescription = theStepExecution.getExitStatus().getExitDescription(); - myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription); + myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription); } return theStepExecution.getExitStatus(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java similarity index 83% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java index 6e336c16b54..291251894d3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * 
#%L @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; @@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet { @Override public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) { if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) { - myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE); + myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE); } else { - myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR); + myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR); } return RepeatStatus.FINISHED; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java similarity index 88% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java index 22a11abcbdc..6a44261b140 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import 
ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor; import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepScope; import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; import org.springframework.batch.item.support.CompositeItemProcessor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; @@ -59,6 +57,7 @@ public class BulkExportJobConfig { public static final String GROUP_ID_PARAMETER = "groupId"; public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes"; public static final int CHUNK_SIZE = 100; + public static final String JOB_DESCRIPTION = "jobDescription"; @Autowired private StepBuilderFactory myStepBuilderFactory; @@ -90,9 +89,9 @@ public class BulkExportJobConfig { @Lazy public Job bulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME) - .validator(bulkJobParameterValidator()) + .validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) - .next(partitionStep()) + .next(bulkExportPartitionStep()) .next(closeJobStep()) .build(); } @@ -114,7 +113,7 @@ public class BulkExportJobConfig { public Job groupBulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) .validator(groupBulkJobParameterValidator()) - .validator(bulkJobParameterValidator()) + 
.validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) .next(groupPartitionStep()) .next(closeJobStep()) @@ -125,7 +124,7 @@ public class BulkExportJobConfig { @Lazy public Job patientBulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) - .validator(bulkJobParameterValidator()) + .validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) .next(patientPartitionStep()) .next(closeJobStep()) @@ -150,8 +149,9 @@ public class BulkExportJobConfig { return new CreateBulkExportEntityTasklet(); } + @Bean - public JobParametersValidator bulkJobParameterValidator() { + public JobParametersValidator bulkExportJobParameterValidator() { return new BulkExportJobParameterValidator(); } @@ -159,7 +159,7 @@ public class BulkExportJobConfig { @Bean public Step groupBulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. .reader(groupBulkItemReader()) .processor(inflateResourceThenAnnotateWithGoldenResourceProcessor()) .writer(resourceToFileWriter()) @@ -170,17 +170,18 @@ public class BulkExportJobConfig { @Bean public Step bulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. 
.reader(bulkItemReader()) .processor(myPidToIBaseResourceProcessor) .writer(resourceToFileWriter()) .listener(bulkExportGenerateResourceFilesStepListener()) .build(); } + @Bean public Step patientBulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. .reader(patientBulkItemReader()) .processor(myPidToIBaseResourceProcessor) .writer(resourceToFileWriter()) @@ -214,7 +215,7 @@ public class BulkExportJobConfig { } @Bean - public Step partitionStep() { + public Step bulkExportPartitionStep() { return myStepBuilderFactory.get("partitionStep") .partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner()) .step(bulkExportGenerateResourceFilesStep()) @@ -240,7 +241,7 @@ public class BulkExportJobConfig { @Bean @StepScope - public GroupBulkItemReader groupBulkItemReader(){ + public GroupBulkItemReader groupBulkItemReader() { return new GroupBulkItemReader(); } @@ -252,7 +253,7 @@ public class BulkExportJobConfig { @Bean @StepScope - public BulkItemReader bulkItemReader(){ + public BulkItemReader bulkItemReader() { return new BulkItemReader(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java index 01e503c6687..64d06052d43 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersValidator; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java similarity index 95% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java index 7219c900c87..881e7215620 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; import ca.uhn.fhir.rest.api.Constants; import org.springframework.batch.core.JobParametersBuilder; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java similarity index 94% rename from 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java index 0c9bf4fae2b..83f7c7d0d50 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.slf4j.Logger; -import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID); Set myReadPids = new HashSet<>(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java index 74e85d2188a..3a14e3e1145 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,8 +20,8 @@ 
package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; @@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet { } } - private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) { + public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) { theChunkContext .getStepContext() .getStepExecution() diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java index f79adc79ee1..5d9b90f7004 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java index 30a7567776e..3a10fec2aae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -36,7 +36,6 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.ReferenceOrListParam; import ca.uhn.fhir.rest.param.ReferenceParam; -import com.google.common.collect.Multimaps; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; @@ -81,7 +80,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade private MdmExpansionCacheSvc myMdmExpansionCacheSvc; @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { Set myReadPids = new HashSet<>(); //Short circuit out if we detect we are attempting to extract patients diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java index a28eaaf0338..8e662049b5c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * 
#%L @@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersValidator; -import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*; +import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*; import static org.slf4j.LoggerFactory.getLogger; public class GroupIdPresentValidator implements JobParametersValidator { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java similarity index 97% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java index c206404ac95..93519863ec7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea } @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) { String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. 
It must be enabled for Patient Bulk Export"; ourLog.error(errorMessage); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java similarity index 97% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java index 82501c274c8..8b4ebe7e86a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.log.Logs; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java index 4cde1ab954b..7eb612d2211 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java @@ -1,4 +1,4 @@ 
-package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.slf4j.Logger; import org.springframework.batch.core.partition.support.Partitioner; import org.springframework.batch.item.ExecutionContext; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java similarity index 77% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java index e4fe675665c..db520b9cfd2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.model; +package ca.uhn.fhir.jpa.bulk.export.model; /*- * #%L @@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model; * #L% */ -public enum BulkJobStatusEnum { +import com.fasterxml.jackson.annotation.JsonFormat; + +@JsonFormat(shape = JsonFormat.Shape.STRING) +public enum BulkExportJobStatusEnum { + + /** + * Sorting OK! 
+ */ SUBMITTED, BUILDING, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java index 011eb6ddf14..31f3daf32d7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.model; +package ca.uhn.fhir.jpa.bulk.export.model; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java index 28c4bc39e05..57ca30d0f78 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.provider; +package ca.uhn.fhir.jpa.bulk.export.provider; /*- * #%L @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import 
ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java similarity index 95% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java index 7bec3d61a89..872c036f63a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L @@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import 
ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; @@ -43,16 +42,12 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.UrlUtil; -import com.google.common.collect.Sets; import org.apache.commons.lang3.time.DateUtils; -import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBinary; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; @@ -78,9 +73,9 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -136,7 +131,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { Optional jobToProcessOpt = myTxTemplate.execute(t -> { Pageable page = PageRequest.of(0, 1); - Slice submittedJobs = 
myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED); + Slice submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED); if (submittedJobs.isEmpty()) { return Optional.empty(); } @@ -158,7 +153,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { Optional submittedJobs = myBulkExportJobDao.findByJobId(jobUuid); if (submittedJobs.isPresent()) { BulkExportJobEntity jobEntity = submittedJobs.get(); - jobEntity.setStatus(BulkJobStatusEnum.ERROR); + jobEntity.setStatus(BulkExportJobStatusEnum.ERROR); jobEntity.setStatusMessage(e.getMessage()); myBulkExportJobDao.save(jobEntity); } @@ -344,7 +339,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { if (useCache) { Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); Pageable page = PageRequest.of(0, 10); - Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR); + Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR); if (!existing.isEmpty()) { return toSubmittedJobInfo(existing.iterator().next()); } @@ -373,7 +368,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { BulkExportJobEntity job = new BulkExportJobEntity(); job.setJobId(UUID.randomUUID().toString()); - job.setStatus(BulkJobStatusEnum.SUBMITTED); + job.setStatus(BulkExportJobStatusEnum.SUBMITTED); job.setSince(since); job.setCreated(new Date()); job.setRequest(request); @@ -445,7 +440,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { retVal.setStatusMessage(job.getStatusMessage()); retVal.setRequest(job.getRequest()); - if (job.getStatus() == BulkJobStatusEnum.COMPLETE) { + if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) { for (BulkExportCollectionEntity nextCollection : job.getCollections()) { for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) { retVal.addFile() diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java index 268cd4c29e6..cc255829231 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java similarity index 90% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java index d69f8cbc235..7aa6521b68e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L @@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc; * #L% */ -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import 
ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; @@ -84,12 +82,12 @@ public class BulkExportDaoSvc { } @Transactional - public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) { + public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) { setJobToStatus(theJobUUID, theStatus, null); } @Transactional - public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) { + public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) { Optional oJob = myBulkExportJobDao.findByJobId(theJobUUID); if (!oJob.isPresent()) { ourLog.error("Job with UUID {} doesn't exist!", theJobUUID); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java new file mode 100644 index 00000000000..7e6ef86b6ad --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java @@ -0,0 +1,93 @@ +package ca.uhn.fhir.jpa.bulk.imprt.api; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; + +import javax.annotation.Nonnull; +import java.util.List; + +public interface IBulkDataImportSvc { + + /** + * Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to) + */ + String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles); + + /** + * Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state + * + * @param theJobId The job ID + * @param theFiles The files to add to the job + */ + void addFilesToJob(String theJobId, List theFiles); + + /** + * Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} + * state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY} + * state, meaning that is is a candidate to be picked up for processing + * + * @param theJobId The job ID + */ + void markJobAsReadyForActivation(String theJobId); + + /** + * This method is intended to be called from the job scheduler, and will begin execution on + * the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY} + * + * @return Returns {@literal true} if a job was activated + */ + boolean activateNextReadyJob(); + + /** + * Updates the job status for the given job + */ + void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus); + + /** + * Updates the job status for the given job + */ + void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage); + + /** + * Gets the number of files available for a given Job ID + * + * @param theJobId The job ID + * @return The file count + */ + BulkImportJobJson fetchJob(String theJobId); + + /** + * 
Fetch a given file by job ID + * + * @param theJobId The job ID + * @param theFileIndex The index of the file within the job + * @return The file + */ + BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex); + + /** + * Delete all input files associated with a particular job + */ + void deleteJobFiles(String theJobId); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java new file mode 100644 index 00000000000..4e842856773 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java @@ -0,0 +1,51 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.StepExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Will run before and after a job to set the status to whatever is appropriate. + */ +public class ActivateBulkImportEntityStepListener implements StepExecutionListener { + + @Autowired + private IBulkDataImportSvc myBulkImportDaoSvc; + + @Override + public void beforeStep(StepExecution theStepExecution) { + String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + if (jobUuid != null) { + myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING); + } + } + + @Override + public ExitStatus afterStep(StepExecution theStepExecution) { + return ExitStatus.EXECUTING; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java new file mode 100644 index 00000000000..601e76244f0 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java @@ -0,0 +1,76 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.batch.log.Logs; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; +import ca.uhn.fhir.util.IoUtil; +import com.google.common.io.LineReader; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import java.io.StringReader; + +@SuppressWarnings("UnstableApiUsage") +public class BulkImportFileReader implements ItemReader { + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + @Autowired + private FhirContext myFhirContext; + @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUuid; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") + private int myFileIndex; + + private StringReader myReader; + private LineReader myLineReader; + private int myLineIndex; + private String myTenantName; + + @Override + public ParsedBulkImportRecord read() throws Exception { + + if (myReader == null) { + BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex); + myTenantName = file.getTenantName(); + myReader = new StringReader(file.getContents()); + myLineReader = new LineReader(myReader); + } + + 
String nextLine = myLineReader.readLine(); + if (nextLine == null) { + IoUtil.closeQuietly(myReader); + return null; + } + + Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid); + + IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine); + return new ParsedBulkImportRecord(myTenantName, parsed); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java new file mode 100644 index 00000000000..5f893474c26 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java @@ -0,0 +1,74 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import java.util.List; + +public class BulkImportFileWriter implements ItemWriter { + + private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class); + @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUuid; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") + private int myFileIndex; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}") + private JobFileRowProcessingModeEnum myRowProcessingMode; + @Autowired + private DaoRegistry myDaoRegistry; + + @SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"}) + @Override + public void write(List theItemLists) throws Exception { + ourLog.info("Beginning bulk import write {} chunks Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex); + + for (ParsedBulkImportRecord nextItem : theItemLists) { + + SystemRequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setTenantId(nextItem.getTenantName()); + + // Yeah this is a lame switch - We'll add more later I swear + switch (myRowProcessingMode) { + default: + case FHIR_TRANSACTION: + IFhirSystemDao systemDao = myDaoRegistry.getSystemDao(); + IBaseResource inputBundle = nextItem.getRowContent(); + 
systemDao.transactionNested(requestDetails, inputBundle); + break; + } + + } + + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java new file mode 100644 index 00000000000..504874e327d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java @@ -0,0 +1,57 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +/** + * Will run before and after a job to set the status to whatever is appropriate. 
+ */ +public class BulkImportJobCloser implements Tasklet { + + @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUUID; + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Override + public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) { + BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus(); + if (executionStatus == BatchStatus.STARTED) { + myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE); + myBulkDataImportSvc.deleteJobFiles(myJobUUID); + } else { + myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus); + myBulkDataImportSvc.deleteJobFiles(myJobUUID); + } + return RepeatStatus.FINISHED; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java new file mode 100644 index 00000000000..fd86a8ff3ab --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java @@ -0,0 +1,169 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.batch.BatchConstants; +import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParametersValidator; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.JobScope; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.partition.PartitionHandler; +import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler; +import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.repeat.CompletionPolicy; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; +import org.springframework.core.task.TaskExecutor; + +import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME; + +/** + * Spring batch Job configuration file. Contains all necessary plumbing to run a + * Bulk Export job. 
+ */ +@Configuration +public class BulkImportJobConfig { + + public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval"; + + @Autowired + private StepBuilderFactory myStepBuilderFactory; + + @Autowired + private JobBuilderFactory myJobBuilderFactory; + + @Autowired + @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR) + private TaskExecutor myTaskExecutor; + + @Bean(name = BULK_IMPORT_JOB_NAME) + @Lazy + public Job bulkImportJob() throws Exception { + return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME) + .validator(bulkImportJobParameterValidator()) + .start(bulkImportPartitionStep()) + .next(bulkImportCloseJobStep()) + .build(); + } + + @Bean + public JobParametersValidator bulkImportJobParameterValidator() { + return new BulkImportJobParameterValidator(); + } + + @Bean + public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() { + return new CreateBulkImportEntityTasklet(); + } + + @Bean + @JobScope + public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() { + return new ActivateBulkImportEntityStepListener(); + } + + @Bean + public Step bulkImportPartitionStep() throws Exception { + return myStepBuilderFactory.get("bulkImportPartitionStep") + .partitioner("bulkImportPartitionStep", bulkImportPartitioner()) + .partitionHandler(partitionHandler()) + .listener(activateBulkImportEntityStepListener()) + .gridSize(10) + .build(); + } + + private PartitionHandler partitionHandler() throws Exception { + assert myTaskExecutor != null; + + TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler(); + retVal.setStep(bulkImportProcessFilesStep()); + retVal.setTaskExecutor(myTaskExecutor); + retVal.afterPropertiesSet(); + return retVal; + } + + @Bean + public Step bulkImportCloseJobStep() { + return myStepBuilderFactory.get("bulkImportCloseJobStep") + .tasklet(bulkImportJobCloser()) + .build(); + } + + @Bean + @JobScope + public BulkImportJobCloser bulkImportJobCloser() { + return new 
BulkImportJobCloser(); + } + + @Bean + @JobScope + public BulkImportPartitioner bulkImportPartitioner() { + return new BulkImportPartitioner(); + } + + + @Bean + public Step bulkImportProcessFilesStep() { + CompletionPolicy completionPolicy = completionPolicy(); + + return myStepBuilderFactory.get("bulkImportProcessFilesStep") + .chunk(completionPolicy) + .reader(bulkImportFileReader()) + .writer(bulkImportFileWriter()) + .listener(bulkImportStepListener()) + .listener(completionPolicy) + .build(); + } + + @Bean + @StepScope + public CompletionPolicy completionPolicy() { + return new BulkImportProcessStepCompletionPolicy(); + } + + @Bean + @StepScope + public ItemWriter bulkImportFileWriter() { + return new BulkImportFileWriter(); + } + + + @Bean + @StepScope + public BulkImportFileReader bulkImportFileReader() { + return new BulkImportFileReader(); + } + + @Bean + @StepScope + public BulkImportStepListener bulkImportStepListener() { + return new BulkImportStepListener(); + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java new file mode 100644 index 00000000000..a46405fec31 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java @@ -0,0 +1,70 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import org.apache.commons.lang3.StringUtils; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersInvalidException; +import org.springframework.batch.core.JobParametersValidator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.Optional; + +/** + * This class will prevent a job from running if the UUID does not exist or is invalid. 
+ */ +public class BulkImportJobParameterValidator implements JobParametersValidator { + + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private PlatformTransactionManager myTransactionManager; + + @Override + public void validate(JobParameters theJobParameters) throws JobParametersInvalidException { + if (theJobParameters == null) { + throw new JobParametersInvalidException("This job needs Parameters: [jobUUID]"); + } + + TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); + String errorMessage = txTemplate.execute(tx -> { + StringBuilder errorBuilder = new StringBuilder(); + String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + Optional oJob = myBulkImportJobDao.findByJobId(jobUUID); + if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) { + errorBuilder.append("There is no persisted job that exists with UUID: "); + errorBuilder.append(jobUUID); + errorBuilder.append(". "); + } + + return errorBuilder.toString(); + }); + + if (!StringUtils.isEmpty(errorMessage)) { + throw new JobParametersInvalidException(errorMessage); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java new file mode 100644 index 00000000000..626c8caa016 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java @@ -0,0 +1,72 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import org.slf4j.Logger; +import org.springframework.batch.core.partition.support.Partitioner; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import javax.annotation.Nonnull; +import java.util.HashMap; +import java.util.Map; + +import static org.slf4j.LoggerFactory.getLogger; + +public class BulkImportPartitioner implements Partitioner { + public static final String FILE_INDEX = "fileIndex"; + public static final String ROW_PROCESSING_MODE = "rowProcessingMode"; + + private static final Logger ourLog = getLogger(BulkImportPartitioner.class); + + @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUUID; + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Nonnull + @Override + public Map partition(int gridSize) { + Map retVal = new HashMap<>(); + + BulkImportJobJson job = myBulkDataImportSvc.fetchJob(myJobUUID); + + for (int i = 0; i < job.getFileCount(); i++) { + + ExecutionContext context = new ExecutionContext(); + context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID); + context.putInt(FILE_INDEX, i); + context.put(ROW_PROCESSING_MODE, job.getProcessingMode()); + + String key = "FILE" + i; + retVal.put(key, context); + } + + return retVal; + 
} + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java new file mode 100644 index 00000000000..3a3afefc636 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java @@ -0,0 +1,41 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.springframework.batch.repeat.RepeatContext; +import org.springframework.batch.repeat.policy.CompletionPolicySupport; +import org.springframework.beans.factory.annotation.Value; + +import static ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL; + +public class BulkImportProcessStepCompletionPolicy extends CompletionPolicySupport { + + @Value("#{jobParameters['" + JOB_PARAM_COMMIT_INTERVAL + "']}") + private int myChunkSize; + + @Override + public boolean isComplete(RepeatContext context) { + if (context.getStartedCount() < myChunkSize) { + return false; + } + return true; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java new file mode 100644 index 00000000000..8cb1c9b2693 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java @@ -0,0 +1,63 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.StepExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nonnull; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * This class sets the job status to ERROR if any failures occur while actually + * generating the export files. + */ +public class BulkImportStepListener implements StepExecutionListener { + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Override + public void beforeStep(@Nonnull StepExecution stepExecution) { + // nothing + } + + @Override + public ExitStatus afterStep(StepExecution theStepExecution) { + if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) { + //Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context. 
+ String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + if (jobUuid == null) { + jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + } + assert isNotBlank(jobUuid); + String exitDescription = theStepExecution.getExitStatus().getExitDescription(); + myBulkDataImportSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.ERROR, exitDescription); + } + return theStepExecution.getExitStatus(); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java new file mode 100644 index 00000000000..c543ba4961f --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java @@ -0,0 +1,45 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet; +import ca.uhn.fhir.util.ValidateUtil; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; + +import java.util.Map; + +public class CreateBulkImportEntityTasklet implements Tasklet { + + @Override + public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception { + Map jobParameters = theChunkContext.getStepContext().getJobParameters(); + + //We can leave early if they provided us with an existing job. + ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID"); + CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER)); + return RepeatStatus.FINISHED; + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java new file mode 100644 index 00000000000..fb215a2fdce --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java @@ -0,0 +1,51 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.model.api.IModelJson; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class BulkImportJobFileJson implements IModelJson { + + @JsonProperty("tenantName") + private String myTenantName; + @JsonProperty("contents") + private String myContents; + + public String getTenantName() { + return myTenantName; + } + + public BulkImportJobFileJson setTenantName(String theTenantName) { + myTenantName = theTenantName; + return this; + } + + public String getContents() { + return myContents; + } + + public BulkImportJobFileJson setContents(String theContents) { + myContents = theContents; + return this; + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java new file mode 100644 index 00000000000..fe6ea10d0ba --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java @@ -0,0 +1,72 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.model.api.IModelJson; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class BulkImportJobJson implements IModelJson { + + @JsonProperty("processingMode") + private JobFileRowProcessingModeEnum myProcessingMode; + @JsonProperty("jobDescription") + private String myJobDescription; + @JsonProperty("fileCount") + private int myFileCount; + @JsonProperty("batchSize") + private int myBatchSize; + + public String getJobDescription() { + return myJobDescription; + } + + public BulkImportJobJson setJobDescription(String theJobDescription) { + myJobDescription = theJobDescription; + return this; + } + + public JobFileRowProcessingModeEnum getProcessingMode() { + return myProcessingMode; + } + + public BulkImportJobJson setProcessingMode(JobFileRowProcessingModeEnum theProcessingMode) { + myProcessingMode = theProcessingMode; + return this; + } + + public int getFileCount() { + return myFileCount; + } + + public BulkImportJobJson setFileCount(int theFileCount) { + myFileCount = theFileCount; + return this; + } + + public int getBatchSize() { + return myBatchSize; + } + + public BulkImportJobJson setBatchSize(int theBatchSize) { + myBatchSize = theBatchSize; + return this; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java new file mode 100644 index 00000000000..5c3fe355224 --- /dev/null +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java @@ -0,0 +1,34 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import com.fasterxml.jackson.annotation.JsonFormat; + +@JsonFormat(shape = JsonFormat.Shape.STRING) +public enum BulkImportJobStatusEnum { + + STAGING, + READY, + RUNNING, + COMPLETE, + ERROR + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java new file mode 100644 index 00000000000..92826d97242 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java @@ -0,0 +1,34 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import com.fasterxml.jackson.annotation.JsonFormat; + +@JsonFormat(shape = JsonFormat.Shape.STRING) +public enum JobFileRowProcessingModeEnum { + + /** + * Sorting OK + */ + + FHIR_TRANSACTION + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ParsedBulkImportRecord.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ParsedBulkImportRecord.java new file mode 100644 index 00000000000..fba884734c0 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ParsedBulkImportRecord.java @@ -0,0 +1,46 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.io.Serializable; + +public class ParsedBulkImportRecord implements Serializable { + + private static final long serialVersionUID = 1L; + + private final String myTenantName; + private final IBaseResource myRowContent; + + public ParsedBulkImportRecord(String theTenantName, IBaseResource theRowContent) { + myTenantName = theTenantName; + myRowContent = theRowContent; + } + + public String getTenantName() { + return myTenantName; + } + + public IBaseResource getRowContent() { + return myRowContent; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java new file mode 100644 index 00000000000..2bbb97abe5d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java @@ -0,0 +1,280 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.jpa.model.sched.HapiJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ValidateUtil; +import org.apache.commons.lang3.time.DateUtils; +import org.quartz.JobExecutionContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.JobParametersInvalidException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; +import javax.transaction.Transactional; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +public class BulkDataImportSvcImpl implements 
IBulkDataImportSvc { + private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportSvcImpl.class); + @Autowired + private IBulkImportJobDao myJobDao; + + @Autowired + private IBulkImportJobFileDao myJobFileDao; + @Autowired + private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; + @Autowired + private ISchedulerService mySchedulerService; + @Autowired + private IBatchJobSubmitter myJobSubmitter; + @Autowired + @Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME) + private org.springframework.batch.core.Job myBulkImportJob; + + @PostConstruct + public void start() { + myTxTemplate = new TransactionTemplate(myTxManager); + + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(ActivationJob.class.getName()); + jobDetail.setJobClass(ActivationJob.class); + mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail); + } + + @Override + @Transactional + public String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles) { + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription, "Job must not be null"); + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null"); + ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0"); + + String jobId = UUID.randomUUID().toString(); + + ourLog.info("Creating new Bulk Import job with {} files, assigning job ID: {}", theInitialFiles.size(), jobId); + + BulkImportJobEntity job = new BulkImportJobEntity(); + job.setJobId(jobId); + job.setFileCount(theInitialFiles.size()); + job.setStatus(BulkImportJobStatusEnum.STAGING); + job.setJobDescription(theJobDescription.getJobDescription()); + job.setBatchSize(theJobDescription.getBatchSize()); + job.setRowProcessingMode(theJobDescription.getProcessingMode()); + job = myJobDao.save(job); + + int nextSequence = 0; + 
addFilesToJob(theInitialFiles, job, nextSequence); + + return jobId; + } + + @Override + @Transactional + public void addFilesToJob(String theJobId, List theFiles) { + ourLog.info("Adding {} files to bulk import job: {}", theFiles.size(), theJobId); + + BulkImportJobEntity job = findJobByJobId(theJobId); + + ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Job %s has status %s and can not be added to", theJobId, job.getStatus()); + + addFilesToJob(theFiles, job, job.getFileCount()); + + job.setFileCount(job.getFileCount() + theFiles.size()); + myJobDao.save(job); + } + + private BulkImportJobEntity findJobByJobId(String theJobId) { + BulkImportJobEntity job = myJobDao + .findByJobId(theJobId) + .orElseThrow(() -> new InvalidRequestException("Unknown job ID: " + theJobId)); + return job; + } + + @Override + @Transactional + public void markJobAsReadyForActivation(String theJobId) { + ourLog.info("Activating bulk import job {}", theJobId); + + BulkImportJobEntity job = findJobByJobId(theJobId); + ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import job %s can not be activated in status: %s", theJobId, job.getStatus()); + + job.setStatus(BulkImportJobStatusEnum.READY); + myJobDao.save(job); + } + + /** + * To be called by the job scheduler + */ + @Transactional(value = Transactional.TxType.NEVER) + @Override + public boolean activateNextReadyJob() { + + Optional jobToProcessOpt = Objects.requireNonNull(myTxTemplate.execute(t -> { + Pageable page = PageRequest.of(0, 1); + Slice submittedJobs = myJobDao.findByStatus(page, BulkImportJobStatusEnum.READY); + if (submittedJobs.isEmpty()) { + return Optional.empty(); + } + return Optional.of(submittedJobs.getContent().get(0)); + })); + + if (!jobToProcessOpt.isPresent()) { + return false; + } + + BulkImportJobEntity bulkImportJobEntity = jobToProcessOpt.get(); + + String jobUuid = bulkImportJobEntity.getJobId(); + try { + 
processJob(bulkImportJobEntity); + } catch (Exception e) { + ourLog.error("Failure while preparing bulk export extract", e); + myTxTemplate.execute(t -> { + Optional submittedJobs = myJobDao.findByJobId(jobUuid); + if (submittedJobs.isPresent()) { + BulkImportJobEntity jobEntity = submittedJobs.get(); + jobEntity.setStatus(BulkImportJobStatusEnum.ERROR); + jobEntity.setStatusMessage(e.getMessage()); + myJobDao.save(jobEntity); + } + return false; + }); + } + + return true; + } + + @Override + @Transactional + public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus) { + setJobToStatus(theJobId, theStatus, null); + } + + @Override + public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) { + BulkImportJobEntity job = findJobByJobId(theJobId); + job.setStatus(theStatus); + job.setStatusMessage(theStatusMessage); + myJobDao.save(job); + } + + @Override + @Transactional + public BulkImportJobJson fetchJob(String theJobId) { + BulkImportJobEntity job = findJobByJobId(theJobId); + return job.toJson(); + } + + @Transactional + @Override + public BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex) { + BulkImportJobEntity job = findJobByJobId(theJobId); + + return myJobFileDao + .findForJob(job, theFileIndex) + .map(t -> t.toJson()) + .orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for job " + theJobId)); + } + + @Override + @Transactional + public void deleteJobFiles(String theJobId) { + BulkImportJobEntity job = findJobByJobId(theJobId); + List files = myJobFileDao.findAllIdsForJob(theJobId); + for (Long next : files) { + myJobFileDao.deleteById(next); + } + myJobDao.delete(job); + } + + private void processJob(BulkImportJobEntity theBulkExportJobEntity) throws JobParametersInvalidException { + String jobId = theBulkExportJobEntity.getJobId(); + int batchSize = theBulkExportJobEntity.getBatchSize(); + ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 
0, "Batch size must be positive"); + + JobParametersBuilder parameters = new JobParametersBuilder() + .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId) + .addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize); + + if(isNotBlank(theBulkExportJobEntity.getJobDescription())) { + parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription()); + } + + ourLog.info("Submitting bulk import job {} to job scheduler", jobId); + + myJobSubmitter.runJob(myBulkImportJob, parameters.toJobParameters()); + } + + private void addFilesToJob(@Nonnull List theInitialFiles, BulkImportJobEntity job, int nextSequence) { + for (BulkImportJobFileJson nextFile : theInitialFiles) { + ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextFile.getContents(), "Job File Contents mode must not be null"); + + BulkImportJobFileEntity jobFile = new BulkImportJobFileEntity(); + jobFile.setJob(job); + jobFile.setContents(nextFile.getContents()); + jobFile.setTenantName(nextFile.getTenantName()); + jobFile.setFileSequence(nextSequence++); + myJobFileDao.save(jobFile); + } + } + + + public static class ActivationJob implements HapiJob { + @Autowired + private IBulkDataImportSvc myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.activateNextReadyJob(); + } + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index a625e1d8ed3..e2a5f608117 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -11,15 +11,18 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; +import ca.uhn.fhir.jpa.batch.BatchConstants; import 
ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; -import ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl; import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider; @@ -29,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.dao.MatchResourceUrlService; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; +import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation; @@ -63,7 +67,6 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor; -import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager; @@ -95,8 +98,8 @@ 
import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; @@ -129,6 +132,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.validation.JpaResourceLoader; import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import org.hibernate.jpa.HibernatePersistenceProvider; @@ -160,6 +164,7 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import javax.annotation.Nullable; import javax.annotation.PostConstruct; import java.util.Date; +import java.util.concurrent.RejectedExecutionHandler; /* * #%L @@ -185,7 +190,7 @@ import java.util.Date; @Configuration @EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data") @Import({ - SearchParamConfig.class, BatchJobsConfig.class + SearchParamConfig.class, BatchJobsConfig.class }) @EnableBatchProcessing public abstract class BaseConfig { @@ -199,24 +204,23 @@ public abstract class BaseConfig { public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = 
"PersistedJpaSearchFirstPageBundleProvider"; public static final String SEARCH_BUILDER = "SearchBuilder"; public static final String HISTORY_BUILDER = "HistoryBuilder"; - private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI"; public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder"; - + private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI"; @Autowired protected Environment myEnv; @Autowired private DaoRegistry myDaoRegistry; + private Integer searchCoordCorePoolSize = 20; + private Integer searchCoordMaxPoolSize = 100; + private Integer searchCoordQueueCapacity = 200; /** * Subclasses may override this method to provide settings such as search coordinator pool sizes. */ @PostConstruct - public void initSettings() {} - - private Integer searchCoordCorePoolSize = 20; - private Integer searchCoordMaxPoolSize = 100; - private Integer searchCoordQueueCapacity = 200; + public void initSettings() { + } public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) { this.searchCoordCorePoolSize = searchCoordCorePoolSize; @@ -297,6 +301,11 @@ public abstract class BaseConfig { return new SubscriptionTriggeringProvider(); } + @Bean + public TransactionProcessor transactionProcessor() { + return new TransactionProcessor(); + } + @Bean(name = "myAttachmentBinaryAccessProvider") @Lazy public BinaryAccessProvider binaryAccessProvider() { @@ -381,13 +390,15 @@ public abstract class BaseConfig { return retVal; } - @Bean + @Bean(name= BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR) public TaskExecutor jobLaunchingTaskExecutor() { ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor(); - asyncTaskExecutor.setCorePoolSize(5); + asyncTaskExecutor.setCorePoolSize(0); asyncTaskExecutor.setMaxPoolSize(10); - asyncTaskExecutor.setQueueCapacity(500); + asyncTaskExecutor.setQueueCapacity(0); + asyncTaskExecutor.setAllowCoreThreadTimeOut(true); asyncTaskExecutor.setThreadNamePrefix("JobLauncher-"); + 
asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy()); asyncTaskExecutor.initialize(); return asyncTaskExecutor; } @@ -514,6 +525,11 @@ public abstract class BaseConfig { return new BulkDataExportProvider(); } + @Bean + @Lazy + public IBulkDataImportSvc bulkDataImportSvc() { + return new BulkDataImportSvcImpl(); + } @Bean public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() { @@ -614,7 +630,7 @@ public abstract class BaseConfig { public QuantityNormalizedPredicateBuilder newQuantityNormalizedPredicateBuilder(SearchQueryBuilder theSearchBuilder) { return new QuantityNormalizedPredicateBuilder(theSearchBuilder); } - + @Bean @Scope("prototype") public ResourceLinkPredicateBuilder newResourceLinkPredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theSearchBuilder, boolean theReversed) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java index c012ebbfc94..aebba0133b4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java @@ -7,6 +7,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; +import ca.uhn.fhir.jpa.dao.TransactionProcessor; +import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2; +import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4; import ca.uhn.fhir.jpa.term.TermReadSvcDstu2; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.util.ResourceCountCache; @@ -93,6 +96,12 @@ public class BaseDstu2Config extends BaseConfig { return retVal; } + @Bean + public TransactionProcessor.ITransactionProcessorVersionAdapter 
transactionProcessorVersionFacade() { + return new TransactionProcessorVersionAdapterDstu2(); + } + + @Bean(name = "myDefaultProfileValidationSupport") public DefaultProfileValidationSupport defaultProfileValidationSupport() { return new DefaultProfileValidationSupport(fhirContext()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java index aa9472737c8..92529b25f5b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java @@ -87,11 +87,6 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterDstu3(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = "myResourceCountsCache") public ResourceCountCache resourceCountsCache() { ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java index beb5bc4eea0..6879d7dfd1b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java @@ -82,11 +82,6 @@ public class BaseR4Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterR4(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = GRAPHQL_PROVIDER_NAME) @Lazy public GraphQLProvider graphQLProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java index 6ccb22fef95..c217a864907 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java @@ -80,11 +80,6 @@ public class BaseR5Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterR5(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = GRAPHQL_PROVIDER_NAME) @Lazy public GraphQLProvider graphQLProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index da680a8f017..7e1877cc77d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; -import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.util.StopWatch; +import com.google.common.annotations.VisibleForTesting; +import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -17,6 +18,7 @@ import org.springframework.transaction.annotation.Propagation; import 
org.springframework.transaction.annotation.Transactional; import javax.annotation.Nullable; +import javax.annotation.PostConstruct; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -42,14 +44,26 @@ import java.util.Map; * #L% */ -public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao implements IFhirSystemDao { +public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao implements IFhirSystemDao { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); @Autowired @Qualifier("myResourceCountsCache") public ResourceCountCache myResourceCountsCache; @Autowired - private PartitionSettings myPartitionSettings; + private TransactionProcessor myTransactionProcessor; + + @VisibleForTesting + public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) { + myTransactionProcessor = theTransactionProcessor; + } + + @Override + @PostConstruct + public void start() { + super.start(); + myTransactionProcessor.setDao(this); + } @Override @Transactional(propagation = Propagation.NEVER) @@ -91,6 +105,18 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest) { + public BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) { if (theRequestDetails != null && theRequestDetails.getServer() != null && myDao != null) { IServerInterceptor.ActionRequestDetails requestDetails = new IServerInterceptor.ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null); myDao.notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails); } String actionName = "Transaction"; - IBaseBundle response = processTransactionAsSubRequest((RequestDetails) theRequestDetails, theRequest, actionName); + IBaseBundle response = processTransactionAsSubRequest(theRequestDetails, theRequest, actionName, theNestedMode); List entries = 
myVersionAdapter.getEntries(response); for (int i = 0; i < entries.size(); i++) { @@ -190,7 +191,7 @@ public abstract class BaseTransactionProcessor { myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue()); } - transaction(theRequestDetails, transactionBundle); + transaction(theRequestDetails, transactionBundle, false); return resp; } @@ -270,10 +271,10 @@ public abstract class BaseTransactionProcessor { myDao = theDao; } - private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName) { + private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) { BaseHapiFhirDao.markRequestAsProcessingSubRequest(theRequestDetails); try { - return processTransaction(theRequestDetails, theRequest, theActionName); + return processTransaction(theRequestDetails, theRequest, theActionName, theNestedMode); } finally { BaseHapiFhirDao.clearRequestAsProcessingSubRequest(theRequestDetails); } @@ -289,7 +290,7 @@ public abstract class BaseTransactionProcessor { myTxManager = theTxManager; } - private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest) { + private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) { ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size()); long start = System.currentTimeMillis(); @@ -310,7 +311,7 @@ public abstract class BaseTransactionProcessor { IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry); - IBaseBundle nextResponseBundle = processTransactionAsSubRequest((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request"); + IBaseBundle nextResponseBundle = 
processTransactionAsSubRequest(theRequestDetails, subRequestBundle, "Batch sub-request", theNestedMode); IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); myVersionAdapter.addEntry(resp, subResponseEntry); @@ -341,7 +342,7 @@ public abstract class BaseTransactionProcessor { } long delay = System.currentTimeMillis() - start; - ourLog.info("Batch completed in {}ms", new Object[]{delay}); + ourLog.info("Batch completed in {}ms", delay); return resp; } @@ -351,13 +352,13 @@ public abstract class BaseTransactionProcessor { myHapiTransactionService = theHapiTransactionService; } - private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName) { + private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName, boolean theNestedMode) { validateDependencies(); String transactionType = myVersionAdapter.getBundleType(theRequest); if (org.hl7.fhir.r4.model.Bundle.BundleType.BATCH.toCode().equals(transactionType)) { - return batch(theRequestDetails, theRequest); + return batch(theRequestDetails, theRequest, theNestedMode); } if (transactionType == null) { @@ -465,6 +466,10 @@ public abstract class BaseTransactionProcessor { } for (IBase nextReqEntry : getEntries) { + if (theNestedMode) { + throw new InvalidRequestException("Can not invoke read operation on nested transaction"); + } + if (!(theRequestDetails instanceof ServletRequestDetails)) { throw new MethodNotAllowedException("Can not call transaction GET methods from this context"); } @@ -976,7 +981,12 @@ public abstract class BaseTransactionProcessor { } } - IPrimitiveType deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource); + IPrimitiveType deletedInstantOrNull; + if (nextResource instanceof IAnyResource) { + deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource); + 
} else { + deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) nextResource); + } Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null; IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextResource.getClass()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java index 9581599cac3..81ab30c20b7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java @@ -20,559 +20,19 @@ package ca.uhn.fhir.jpa.dao; * #L% */ -import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; -import ca.uhn.fhir.jpa.api.model.DeleteConflictList; -import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; -import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; -import ca.uhn.fhir.jpa.delete.DeleteConflictService; -import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.TagDefinition; -import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; -import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; -import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.resource.Bundle; -import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; -import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryResponse; -import ca.uhn.fhir.model.dstu2.resource.OperationOutcome; -import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum; 
-import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum; -import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.model.primitive.InstantDt; -import ca.uhn.fhir.model.primitive.UriDt; -import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.parser.IParser; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.RestfulServerUtils; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.NotModifiedException; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; -import ca.uhn.fhir.rest.server.method.BaseMethodBinding; -import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding; -import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails; -import ca.uhn.fhir.util.FhirTerser; -import ca.uhn.fhir.util.UrlUtil; -import ca.uhn.fhir.util.UrlUtil.UrlParts; -import com.google.common.collect.ArrayListMultimap; -import org.apache.http.NameValuePair; import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import 
org.springframework.transaction.support.TransactionTemplate; import javax.persistence.TypedQuery; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.commons.lang3.StringUtils.defaultString; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoDstu2.class); - - @Autowired - private PlatformTransactionManager myTxManager; - @Autowired - private MatchUrlService myMatchUrlService; - @Autowired - private DaoRegistry myDaoRegistry; - @Autowired - private MatchResourceUrlService myMatchResourceUrlService; - @Autowired - private HapiTransactionService myHapiTransactionalService; - - private Bundle batch(final RequestDetails theRequestDetails, Bundle theRequest) { - ourLog.info("Beginning batch with {} resources", theRequest.getEntry().size()); - long start = System.currentTimeMillis(); - - Bundle resp = new Bundle(); - resp.setType(BundleTypeEnum.BATCH_RESPONSE); - - /* - * For batch, we handle each entry as a mini-transaction in its own database transaction so that if one fails, it doesn't prevent others - */ - - for (final Entry nextRequestEntry : theRequest.getEntry()) { - - TransactionCallback callback = new TransactionCallback() { - @Override - public Bundle doInTransaction(TransactionStatus theStatus) { - Bundle subRequestBundle = new Bundle(); - subRequestBundle.setType(BundleTypeEnum.TRANSACTION); - subRequestBundle.addEntry(nextRequestEntry); - return transaction((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch 
sub-request"); - } - }; - - BaseServerResponseException caughtEx; - try { - Bundle nextResponseBundle; - if (nextRequestEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) { - // Don't process GETs in a transaction because they'll - // create their own - nextResponseBundle = callback.doInTransaction(null); - } else { - nextResponseBundle = myHapiTransactionalService.execute(theRequestDetails, callback); - } - caughtEx = null; - - Entry subResponseEntry = nextResponseBundle.getEntry().get(0); - resp.addEntry(subResponseEntry); - /* - * If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it - */ - if (subResponseEntry.getResource() == null) { - subResponseEntry.setResource(nextResponseBundle.getEntry().get(0).getResource()); - } - - } catch (BaseServerResponseException e) { - caughtEx = e; - } catch (Throwable t) { - ourLog.error("Failure during BATCH sub transaction processing", t); - caughtEx = new InternalErrorException(t); - } - - if (caughtEx != null) { - Entry nextEntry = resp.addEntry(); - - OperationOutcome oo = new OperationOutcome(); - oo.addIssue().setSeverity(IssueSeverityEnum.ERROR).setDiagnostics(caughtEx.getMessage()); - nextEntry.setResource(oo); - - EntryResponse nextEntryResp = nextEntry.getResponse(); - nextEntryResp.setStatus(toStatusString(caughtEx.getStatusCode())); - } - - } - - long delay = System.currentTimeMillis() - start; - ourLog.info("Batch completed in {}ms", new Object[] {delay}); - - return resp; - } - - @SuppressWarnings("unchecked") - private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) { - BundleTypeEnum transactionType = theRequest.getTypeElement().getValueAsEnum(); - if (transactionType == BundleTypeEnum.BATCH) { - return batch(theRequestDetails, theRequest); - } - - return doTransaction(theRequestDetails, theRequest, theActionName, transactionType); - } - - 
private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, BundleTypeEnum theTransactionType) { - if (theTransactionType == null) { - String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + BundleTypeEnum.TRANSACTION.getCode(); - ourLog.warn(message); - theTransactionType = BundleTypeEnum.TRANSACTION; - } - if (theTransactionType != BundleTypeEnum.TRANSACTION) { - throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + theTransactionType.getCode()); - } - - ourLog.info("Beginning {} with {} resources", theActionName, theRequest.getEntry().size()); - - long start = System.currentTimeMillis(); - TransactionDetails transactionDetails = new TransactionDetails(); - - Set allIds = new LinkedHashSet(); - Map idSubstitutions = new HashMap(); - Map idToPersistedOutcome = new HashMap(); - - /* - * We want to execute the transaction request bundle elements in the order - * specified by the FHIR specification (see TransactionSorter) so we save the - * original order in the request, then sort it. - * - * Entries with a type of GET are removed from the bundle so that they - * can be processed at the very end. 
We do this because the incoming resources - * are saved in a two-phase way in order to deal with interdependencies, and - * we want the GET processing to use the final indexing state - */ - Bundle response = new Bundle(); - List getEntries = new ArrayList(); - IdentityHashMap originalRequestOrder = new IdentityHashMap(); - for (int i = 0; i < theRequest.getEntry().size(); i++) { - originalRequestOrder.put(theRequest.getEntry().get(i), i); - response.addEntry(); - if (theRequest.getEntry().get(i).getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) { - getEntries.add(theRequest.getEntry().get(i)); - } - } - Collections.sort(theRequest.getEntry(), new TransactionSorter()); - - List deletedResources = new ArrayList<>(); - DeleteConflictList deleteConflicts = new DeleteConflictList(); - Map entriesToProcess = new IdentityHashMap<>(); - Set nonUpdatedEntities = new HashSet<>(); - Set updatedEntities = new HashSet<>(); - - /* - * Handle: GET/PUT/POST - */ - TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); - txTemplate.execute(t->{ - handleTransactionWriteOperations(theRequestDetails, theRequest, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, deletedResources, deleteConflicts, entriesToProcess, nonUpdatedEntities, updatedEntities); - return null; - }); - - /* - * Loop through the request and process any entries of type GET - */ - for (int i = 0; i < getEntries.size(); i++) { - Entry nextReqEntry = getEntries.get(i); - Integer originalOrder = originalRequestOrder.get(nextReqEntry); - Entry nextRespEntry = response.getEntry().get(originalOrder); - - ServletSubRequestDetails requestDetails = new ServletSubRequestDetails(theRequestDetails); - requestDetails.setServletRequest(theRequestDetails.getServletRequest()); - requestDetails.setRequestType(RequestTypeEnum.GET); - requestDetails.setServer(theRequestDetails.getServer()); - - String url = 
extractTransactionUrlOrThrowException(nextReqEntry, HTTPVerbEnum.GET); - - int qIndex = url.indexOf('?'); - ArrayListMultimap paramValues = ArrayListMultimap.create(); - requestDetails.setParameters(new HashMap()); - if (qIndex != -1) { - String params = url.substring(qIndex); - List parameters = UrlUtil.translateMatchUrl(params); - for (NameValuePair next : parameters) { - paramValues.put(next.getName(), next.getValue()); - } - for (Map.Entry> nextParamEntry : paramValues.asMap().entrySet()) { - String[] nextValue = nextParamEntry.getValue().toArray(new String[nextParamEntry.getValue().size()]); - requestDetails.addParameter(nextParamEntry.getKey(), nextValue); - } - url = url.substring(0, qIndex); - } - - requestDetails.setRequestPath(url); - requestDetails.setFhirServerBase(theRequestDetails.getFhirServerBase()); - - theRequestDetails.getServer().populateRequestDetailsFromRequestPath(requestDetails, url); - BaseMethodBinding method = theRequestDetails.getServer().determineResourceMethod(requestDetails, url); - if (method == null) { - throw new IllegalArgumentException("Unable to handle GET " + url); - } - - if (isNotBlank(nextReqEntry.getRequest().getIfMatch())) { - requestDetails.addHeader(Constants.HEADER_IF_MATCH, nextReqEntry.getRequest().getIfMatch()); - } - if (isNotBlank(nextReqEntry.getRequest().getIfNoneExist())) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_EXIST, nextReqEntry.getRequest().getIfNoneExist()); - } - if (isNotBlank(nextReqEntry.getRequest().getIfNoneMatch())) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_MATCH, nextReqEntry.getRequest().getIfNoneMatch()); - } - - if (method instanceof BaseResourceReturningMethodBinding) { - try { - IBaseResource resource = ((BaseResourceReturningMethodBinding) method).doInvokeServer(theRequestDetails.getServer(), requestDetails); - if (paramValues.containsKey(Constants.PARAM_SUMMARY) || paramValues.containsKey(Constants.PARAM_CONTENT)) { - resource = filterNestedBundle(requestDetails, 
resource); - } - nextRespEntry.setResource((IResource) resource); - nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK)); - } catch (NotModifiedException e) { - nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED)); - } - } else { - throw new IllegalArgumentException("Unable to handle GET " + url); - } - - } - - for (Map.Entry nextEntry : entriesToProcess.entrySet()) { - nextEntry.getKey().getResponse().setLocation(nextEntry.getValue().getIdDt().toUnqualified().getValue()); - nextEntry.getKey().getResponse().setEtag(nextEntry.getValue().getIdDt().getVersionIdPart()); - } - - long delay = System.currentTimeMillis() - start; - int numEntries = theRequest.getEntry().size(); - long delayPer = delay / numEntries; - ourLog.info("{} completed in {}ms ({} entries at {}ms per entry)", theActionName, delay, numEntries, delayPer); - - response.setType(BundleTypeEnum.TRANSACTION_RESPONSE); - return response; - } - - private void handleTransactionWriteOperations(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, TransactionDetails theTransactionDetails, Set theAllIds, Map theIdSubstitutions, Map theIdToPersistedOutcome, Bundle theResponse, IdentityHashMap theOriginalRequestOrder, List theDeletedResources, DeleteConflictList theDeleteConflicts, Map theEntriesToProcess, Set theNonUpdatedEntities, Set theUpdatedEntities) { - /* - * Loop through the request and process any entries of type - * PUT, POST or DELETE - */ - for (int i = 0; i < theRequest.getEntry().size(); i++) { - - if (i % 100 == 0) { - ourLog.debug("Processed {} non-GET entries out of {}", i, theRequest.getEntry().size()); - } - - Entry nextReqEntry = theRequest.getEntry().get(i); - IResource res = nextReqEntry.getResource(); - IdDt nextResourceId = null; - if (res != null) { - - nextResourceId = res.getId(); - - if (!nextResourceId.hasIdPart()) { - if (isNotBlank(nextReqEntry.getFullUrl())) { - nextResourceId = new 
IdDt(nextReqEntry.getFullUrl()); - } - } - - if (nextResourceId.hasIdPart() && nextResourceId.getIdPart().matches("[a-zA-Z]+:.*") && !isPlaceholder(nextResourceId)) { - throw new InvalidRequestException("Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'"); - } - - if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) { - nextResourceId = new IdDt(toResourceName(res.getClass()), nextResourceId.getIdPart()); - res.setId(nextResourceId); - } - - /* - * Ensure that the bundle doesn't have any duplicates, since this causes all kinds of weirdness - */ - if (isPlaceholder(nextResourceId)) { - if (!theAllIds.add(nextResourceId)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextResourceId)); - } - } else if (nextResourceId.hasResourceType() && nextResourceId.hasIdPart()) { - IdDt nextId = nextResourceId.toUnqualifiedVersionless(); - if (!theAllIds.add(nextId)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextId)); - } - } - - } - - HTTPVerbEnum verb = nextReqEntry.getRequest().getMethodElement().getValueAsEnum(); - if (verb == null) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionEntryHasInvalidVerb", nextReqEntry.getRequest().getMethod())); - } - - String resourceType = res != null ? 
getContext().getResourceType(res) : null; - Entry nextRespEntry = theResponse.getEntry().get(theOriginalRequestOrder.get(nextReqEntry)); - - switch (verb) { - case POST: { - // CREATE - @SuppressWarnings("rawtypes") - IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass()); - res.setId((String) null); - DaoMethodOutcome outcome; - outcome = resourceDao.create(res, nextReqEntry.getRequest().getIfNoneExist(), false, theTransactionDetails, theRequestDetails); - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - if (outcome.getCreated() == false) { - theNonUpdatedEntities.add(outcome.getEntity()); - } - break; - } - case DELETE: { - // DELETE - String url = extractTransactionUrlOrThrowException(nextReqEntry, verb); - UrlParts parts = UrlUtil.parseUrl(url); - IFhirResourceDao dao = toDao(parts, verb.getCode(), url); - int status = Constants.STATUS_HTTP_204_NO_CONTENT; - if (parts.getResourceId() != null) { - DaoMethodOutcome outcome = dao.delete(new IdDt(parts.getResourceType(), parts.getResourceId()), theDeleteConflicts, theRequestDetails, theTransactionDetails); - if (outcome.getEntity() != null) { - theDeletedResources.add(outcome.getId().toUnqualifiedVersionless()); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - } - } else { - DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(parts.getResourceType() + '?' 
+ parts.getParams(), theDeleteConflicts, theRequestDetails); - List allDeleted = deleteOutcome.getDeletedEntities(); - for (ResourceTable deleted : allDeleted) { - theDeletedResources.add(deleted.getIdDt().toUnqualifiedVersionless()); - } - if (allDeleted.isEmpty()) { - status = Constants.STATUS_HTTP_404_NOT_FOUND; - } - } - - nextRespEntry.getResponse().setStatus(toStatusString(status)); - break; - } - case PUT: { - // UPDATE - @SuppressWarnings("rawtypes") - IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass()); - - DaoMethodOutcome outcome; - - String url = extractTransactionUrlOrThrowException(nextReqEntry, verb); - - UrlParts parts = UrlUtil.parseUrl(url); - if (isNotBlank(parts.getResourceId())) { - res.setId(new IdDt(parts.getResourceType(), parts.getResourceId())); - outcome = resourceDao.update(res, null, false, theRequestDetails); - } else { - res.setId((String) null); - outcome = resourceDao.update(res, parts.getResourceType() + '?' + parts.getParams(), false, theRequestDetails); - } - - if (outcome.getCreated() == Boolean.FALSE) { - theUpdatedEntities.add(outcome.getEntity()); - } - - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - break; - } - case GET: - break; - } - } - - /* - * Make sure that there are no conflicts from deletions. E.g. we can't delete something - * if something else has a reference to it.. Unless the thing that has a reference to it - * was also deleted as a part of this transaction, which is why we check this now at the - * end. 
- */ - - theDeleteConflicts.removeIf(next -> theDeletedResources.contains(next.getTargetId().toVersionless())); - DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), theDeleteConflicts); - - /* - * Perform ID substitutions and then index each resource we have saved - */ - - FhirTerser terser = getContext().newTerser(); - for (DaoMethodOutcome nextOutcome : theIdToPersistedOutcome.values()) { - IResource nextResource = (IResource) nextOutcome.getResource(); - if (nextResource == null) { - continue; - } - - // References - List allRefs = terser.getAllPopulatedChildElementsOfType(nextResource, BaseResourceReferenceDt.class); - for (BaseResourceReferenceDt nextRef : allRefs) { - IdDt nextId = nextRef.getReference(); - if (!nextId.hasIdPart()) { - continue; - } - if (theIdSubstitutions.containsKey(nextId)) { - IdDt newId = theIdSubstitutions.get(nextId); - ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId); - nextRef.setReference(newId); - } else { - ourLog.debug(" * Reference [{}] does not exist in bundle", nextId); - } - } - - // URIs - List allUris = terser.getAllPopulatedChildElementsOfType(nextResource, UriDt.class); - for (UriDt nextRef : allUris) { - if (nextRef instanceof IIdType) { - continue; // No substitution on the resource ID itself! - } - IdDt nextUriString = new IdDt(nextRef.getValueAsString()); - if (theIdSubstitutions.containsKey(nextUriString)) { - IdDt newId = theIdSubstitutions.get(nextUriString); - ourLog.debug(" * Replacing resource ref {} with {}", nextUriString, newId); - nextRef.setValue(newId.getValue()); - } else { - ourLog.debug(" * Reference [{}] does not exist in bundle", nextUriString); - } - } - - - InstantDt deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get(nextResource); - Date deletedTimestampOrNull = deletedInstantOrNull != null ? 
deletedInstantOrNull.getValue() : null; - if (theUpdatedEntities.contains(nextOutcome.getEntity())) { - updateInternal(theRequestDetails, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails); - } else if (!theNonUpdatedEntities.contains(nextOutcome.getEntity())) { - updateEntity(theRequestDetails, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true); - } - } - - myEntityManager.flush(); - - /* - * Double check we didn't allow any duplicates we shouldn't have - */ - for (Entry nextEntry : theRequest.getEntry()) { - if (nextEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.POST) { - String matchUrl = nextEntry.getRequest().getIfNoneExist(); - if (isNotBlank(matchUrl)) { - Class resType = nextEntry.getResource().getClass(); - Set val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequestDetails); - if (val.size() > 1) { - throw new InvalidRequestException( - "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?"); - } - } - } - } - - for (IdDt next : theAllIds) { - IdDt replacement = theIdSubstitutions.get(next); - if (replacement == null) { - continue; - } - if (replacement.equals(next)) { - continue; - } - ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement); - } - } - - private String extractTransactionUrlOrThrowException(Entry nextEntry, HTTPVerbEnum verb) { - String url = nextEntry.getRequest().getUrl(); - if (isBlank(url)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionMissingUrl", verb.name())); - } - return url; - } - - /** - * This method is called for nested bundles (e.g. 
if we received a transaction with an entry that - * was a GET search, this method is called on the bundle for the search result, that will be placed in the - * outer bundle). This method applies the _summary and _content parameters to the output of - * that bundle. - *

    - * TODO: This isn't the most efficient way of doing this.. hopefully we can come up with something better in the future. - */ - private IBaseResource filterNestedBundle(RequestDetails theRequestDetails, IBaseResource theResource) { - IParser p = getContext().newJsonParser(); - RestfulServerUtils.configureResponseParser(theRequestDetails, p); - return p.parseResource(theResource.getClass(), p.encodeResourceToString(theResource)); - } @Override public MetaDt metaGetOperation(RequestDetails theRequestDetails) { @@ -589,31 +49,6 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { return retVal; } - private IFhirResourceDao toDao(UrlParts theParts, String theVerb, String theUrl) { - RuntimeResourceDefinition resType; - try { - resType = getContext().getResourceDefinition(theParts.getResourceType()); - } catch (DataFormatException e) { - String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - throw new InvalidRequestException(msg); - } - IFhirResourceDao dao = null; - if (resType != null) { - dao = this.myDaoRegistry.getResourceDaoOrNull(resType.getImplementingClass()); - } - if (dao == null) { - String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - throw new InvalidRequestException(msg); - } - - // if (theParts.getResourceId() == null && theParts.getParams() == null) { - // String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - // throw new InvalidRequestException(msg); - // } - - return dao; - } - protected MetaDt toMetaDt(Collection tagDefinitions) { MetaDt retVal = new MetaDt(); for (TagDefinition next : tagDefinitions) { @@ -632,105 +67,9 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails 
theRequestDetails, Bundle theRequest) { - if (theRequestDetails != null) { - ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null); - notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails); - } - - String actionName = "Transaction"; - return transaction((ServletRequestDetails) theRequestDetails, theRequest, actionName); - } - - private Bundle transaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) { - markRequestAsProcessingSubRequest(theRequestDetails); - try { - return doTransaction(theRequestDetails, theRequest, theActionName); - } finally { - clearRequestAsProcessingSubRequest(theRequestDetails); - } - } - - private static void handleTransactionCreateOrUpdateOutcome(Map idSubstitutions, Map idToPersistedOutcome, IdDt nextResourceId, DaoMethodOutcome outcome, - Entry newEntry, String theResourceType, IResource theRes) { - IdDt newId = (IdDt) outcome.getId().toUnqualifiedVersionless(); - IdDt resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless(); - if (newId.equals(resourceId) == false) { - idSubstitutions.put(resourceId, newId); - if (isPlaceholder(resourceId)) { - /* - * The correct way for substitution IDs to be is to be with no resource type, but we'll accept the qualified kind too just to be lenient. 
- */ - idSubstitutions.put(new IdDt(theResourceType + '/' + resourceId.getValue()), newId); - } - } - idToPersistedOutcome.put(newId, outcome); - if (outcome.getCreated().booleanValue()) { - newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_201_CREATED)); - } else { - newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK)); - } - newEntry.getResponse().setLastModified(ResourceMetadataKeyEnum.UPDATED.get(theRes)); - } - - private static boolean isPlaceholder(IdDt theId) { - if (theId.getValue() != null) { - return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:"); - } - return false; - } - - private static String toStatusString(int theStatusCode) { - return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode)); - } - @Override public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) { return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); } - - /** - * Transaction Order, per the spec: - *

    - * Process any DELETE interactions - * Process any POST interactions - * Process any PUT interactions - * Process any GET interactions - */ - public class TransactionSorter implements Comparator { - - @Override - public int compare(Entry theO1, Entry theO2) { - int o1 = toOrder(theO1); - int o2 = toOrder(theO2); - - return o1 - o2; - } - - private int toOrder(Entry theO1) { - int o1 = 0; - if (theO1.getRequest().getMethodElement().getValueAsEnum() != null) { - switch (theO1.getRequest().getMethodElement().getValueAsEnum()) { - case DELETE: - o1 = 1; - break; - case POST: - o1 = 2; - break; - case PUT: - o1 = 3; - break; - case GET: - o1 = 4; - break; - } - } - return o1; - } - - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java new file mode 100644 index 00000000000..b1b87a079e1 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java @@ -0,0 +1,171 @@ +package ca.uhn.fhir.jpa.dao; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.api.TemporalPrecisionEnum; +import ca.uhn.fhir.model.dstu2.resource.Bundle; +import ca.uhn.fhir.model.dstu2.resource.OperationOutcome; +import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum; +import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum; +import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum; +import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.util.Date; +import java.util.List; + +public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter { + @Override + public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) { + theBundleEntry.getResponse().setStatus(theStatus); + } + + @Override + public void setResponseLastModified(Bundle.Entry theBundleEntry, Date theLastModified) { + theBundleEntry.getResponse().setLastModified(theLastModified, TemporalPrecisionEnum.MILLI); + } + + @Override + public void setResource(Bundle.Entry theBundleEntry, IBaseResource theResource) { + theBundleEntry.setResource((IResource) theResource); + } + + @Override + public IBaseResource getResource(Bundle.Entry theBundleEntry) { + return theBundleEntry.getResource(); + } + + @Override + public String getBundleType(Bundle theRequest) { + if (theRequest.getType() == null) { + return null; + } + return theRequest.getTypeElement().getValue(); + } + + @Override + public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) { + OperationOutcome oo = new OperationOutcome(); + oo.addIssue() + 
.setSeverity(IssueSeverityEnum.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(IssueTypeEnum.EXCEPTION); + theEntry.setResource(oo); + } + + @Override + public Bundle createBundle(String theBundleType) { + Bundle resp = new Bundle(); + try { + resp.setType(BundleTypeEnum.forCode(theBundleType)); + } catch (FHIRException theE) { + throw new InternalErrorException("Unknown bundle type: " + theBundleType); + } + return resp; + } + + @Override + public List getEntries(Bundle theRequest) { + return theRequest.getEntry(); + } + + @Override + public void addEntry(Bundle theBundle, Bundle.Entry theEntry) { + theBundle.addEntry(theEntry); + } + + @Override + public Bundle.Entry addEntry(Bundle theBundle) { + return theBundle.addEntry(); + } + + @Override + public String getEntryRequestVerb(FhirContext theContext, Bundle.Entry theEntry) { + String retVal = null; + HTTPVerbEnum value = theEntry.getRequest().getMethodElement().getValueAsEnum(); + if (value != null) { + retVal = value.getCode(); + } + return retVal; + } + + @Override + public String getFullUrl(Bundle.Entry theEntry) { + return theEntry.getFullUrl(); + } + + @Override + public String getEntryIfNoneExist(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneExist(); + } + + @Override + public String getEntryRequestUrl(Bundle.Entry theEntry) { + return theEntry.getRequest().getUrl(); + } + + @Override + public void setResponseLocation(Bundle.Entry theEntry, String theResponseLocation) { + theEntry.getResponse().setLocation(theResponseLocation); + } + + @Override + public void setResponseETag(Bundle.Entry theEntry, String theEtag) { + theEntry.getResponse().setEtag(theEtag); + } + + @Override + public String getEntryRequestIfMatch(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfMatch(); + } + + @Override + public String getEntryRequestIfNoneExist(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneExist(); + } + + @Override + public String 
getEntryRequestIfNoneMatch(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneMatch(); + } + + @Override + public void setResponseOutcome(Bundle.Entry theEntry, IBaseOperationOutcome theOperationOutcome) { + theEntry.setResource((IResource) theOperationOutcome); + } + + @Override + public void setRequestVerb(Bundle.Entry theEntry, String theVerb) { + theEntry.getRequest().setMethod(HTTPVerbEnum.forCode(theVerb)); + } + + @Override + public void setRequestUrl(Bundle.Entry theEntry, String theUrl) { + theEntry.getRequest().setUrl(theUrl); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java index 708425d5fdb..c5bf0ddf606 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java @@ -1,6 +1,6 @@ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; @@ -38,13 +38,13 @@ public interface IBulkExportJobDao extends JpaRepository findByJobId(@Param("jobid") String theUuid); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status") - Slice findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted); + Slice findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff") Slice findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC") - Slice 
findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus); + Slice findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus); @Modifying @Query("DELETE FROM BulkExportJobEntity t") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java new file mode 100644 index 00000000000..dccaa953eb8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java @@ -0,0 +1,40 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkImportJobDao extends JpaRepository { + + @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myJobId = :jobid") + Optional findByJobId(@Param("jobid") String theUuid); + + @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myStatus = :status") + Slice findByStatus(Pageable thePage, @Param("status") BulkImportJobStatusEnum theStatus); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java new file mode 100644 index 00000000000..c53e49f95a4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java @@ -0,0 +1,43 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.List; +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkImportJobFileDao extends JpaRepository { + + @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC") + List findAllForJob(@Param("jobId") String theJobId); + + @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex") + Optional findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); + + @Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC") + List findAllIdsForJob(@Param("jobId") String theJobId); + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java index 0af11e3a082..96019ae6a21 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java @@ -22,18 +22,13 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import org.hl7.fhir.dstu3.model.Bundle; -import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; import org.hl7.fhir.dstu3.model.Meta; import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; @@ -42,14 +37,10 
@@ import java.util.List; public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { - @Autowired - private TransactionProcessor myTransactionProcessor; - @Override @PostConstruct public void start() { super.start(); - myTransactionProcessor.setDao(this); } @Override @@ -88,12 +79,5 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index 5da2372c9b6..e012ee235ad 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchInclude; @@ -123,6 +125,8 @@ public class ExpungeEverythingService { counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class)); counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class)); counter.addAndGet(expungeEverythingByType(SearchParamPresent.class)); + counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class)); + counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class)); 
counter.addAndGet(expungeEverythingByType(ForcedId.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java index 04baaca4922..a369f3d7e5f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java @@ -22,42 +22,20 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; -import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Meta; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; import java.util.Collection; import java.util.List; public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { - @Autowired - private TransactionProcessor myTransactionProcessor; - - @VisibleForTesting - public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) { - myTransactionProcessor = theTransactionProcessor; - } - - @Override - @PostConstruct - public void start() { - super.start(); - myTransactionProcessor.setDao(this); - } - 
- @Override public Meta metaGetOperation(RequestDetails theRequestDetails) { // Notify interceptors @@ -95,10 +73,4 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java index 9d13bae6d1e..919d831e4a6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java @@ -22,20 +22,14 @@ package ca.uhn.fhir.jpa.dao.r5; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r5.model.Bundle; -import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent; import org.hl7.fhir.r5.model.Meta; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; import java.util.Collection; import java.util.List; @@ -44,17 +38,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class); - @Autowired - private 
TransactionProcessor myTransactionProcessor; - - @Override - @PostConstruct - public void start() { - super.start(); - myTransactionProcessor.setDao(this); - } - - @Override public Meta metaGetOperation(RequestDetails theRequestDetails) { // Notify interceptors @@ -92,10 +75,5 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java index 05f68783fa7..f2f8a092715 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.r5.model.InstantType; @@ -51,9 +51,9 @@ import static org.apache.commons.lang3.StringUtils.left; @Entity @Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = { - @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") + @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") }, indexes = { - @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") + @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") }) public class BulkExportJobEntity implements Serializable { @@ -70,7 +70,7 @@ public class BulkExportJobEntity implements Serializable { @Enumerated(EnumType.STRING) @Column(name = "JOB_STATUS", length = 10, nullable = false) - private 
BulkJobStatusEnum myStatus; + private BulkExportJobStatusEnum myStatus; @Temporal(TemporalType.TIMESTAMP) @Column(name = "CREATED_TIME", nullable = false) private Date myCreated; @@ -156,11 +156,11 @@ public class BulkExportJobEntity implements Serializable { return b.toString(); } - public BulkJobStatusEnum getStatus() { + public BulkExportJobStatusEnum getStatus() { return myStatus; } - public void setStatus(BulkJobStatusEnum theStatus) { + public void setStatus(BulkExportJobStatusEnum theStatus) { if (myStatus != theStatus) { myStatusTime = new Date(); myStatus = theStatus; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java new file mode 100644 index 00000000000..b7de7e9cc7b --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java @@ -0,0 +1,157 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.UniqueConstraint; +import javax.persistence.Version; +import java.io.Serializable; +import java.util.Date; + +import static org.apache.commons.lang3.StringUtils.left; + +@Entity +@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = { + @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID") +}) +public class BulkImportJobEntity implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID") + @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID") + @Column(name = "PID") + private Long myId; + + @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false) + private String myJobId; + @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN) + private String myJobDescription; + @Enumerated(EnumType.STRING) + @Column(name = "JOB_STATUS", length = 10, nullable = false) + private BulkImportJobStatusEnum myStatus; + @Version + @Column(name = "OPTLOCK", nullable = false) + private int myVersion; + @Column(name = "FILE_COUNT", nullable = false) + private int myFileCount; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "STATUS_TIME", nullable = false) + private Date myStatusTime; + @Column(name = "STATUS_MESSAGE", nullable = true, length = 
BulkExportJobEntity.STATUS_MESSAGE_LEN) + private String myStatusMessage; + @Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false) + @Enumerated(EnumType.STRING) + private JobFileRowProcessingModeEnum myRowProcessingMode; + @Column(name = "BATCH_SIZE", nullable = false, updatable = false) + private int myBatchSize; + + public String getJobDescription() { + return myJobDescription; + } + + public void setJobDescription(String theJobDescription) { + myJobDescription = left(theJobDescription, BulkExportJobEntity.STATUS_MESSAGE_LEN); + } + + public JobFileRowProcessingModeEnum getRowProcessingMode() { + return myRowProcessingMode; + } + + public void setRowProcessingMode(JobFileRowProcessingModeEnum theRowProcessingMode) { + myRowProcessingMode = theRowProcessingMode; + } + + public Date getStatusTime() { + return myStatusTime; + } + + public void setStatusTime(Date theStatusTime) { + myStatusTime = theStatusTime; + } + + public int getFileCount() { + return myFileCount; + } + + public void setFileCount(int theFileCount) { + myFileCount = theFileCount; + } + + public String getJobId() { + return myJobId; + } + + public void setJobId(String theJobId) { + myJobId = theJobId; + } + + public BulkImportJobStatusEnum getStatus() { + return myStatus; + } + + /** + * Sets the status, updates the status time, and clears the status message + */ + public void setStatus(BulkImportJobStatusEnum theStatus) { + if (myStatus != theStatus) { + myStatus = theStatus; + setStatusTime(new Date()); + setStatusMessage(null); + } + } + + public String getStatusMessage() { + return myStatusMessage; + } + + public void setStatusMessage(String theStatusMessage) { + myStatusMessage = left(theStatusMessage, BulkExportJobEntity.STATUS_MESSAGE_LEN); + } + + public BulkImportJobJson toJson() { + return new BulkImportJobJson() + .setProcessingMode(getRowProcessingMode()) + .setFileCount(getFileCount()) + .setJobDescription(getJobDescription()); + } + + public int 
getBatchSize() { + return myBatchSize; + } + + public void setBatchSize(int theBatchSize) { + myBatchSize = theBatchSize; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java new file mode 100644 index 00000000000..b1dd778a2c8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java @@ -0,0 +1,104 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.ForeignKey; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Index; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; + +@Entity +@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = { + @Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID") +}) +public class BulkImportJobFileEntity implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID") + @SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID") + @Column(name = "PID") + private Long myId; + + @ManyToOne + @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB")) + private BulkImportJobEntity myJob; + + @Column(name = "FILE_SEQ", nullable = false) + private int myFileSequence; + + @Lob + @Column(name = "JOB_CONTENTS", nullable = false) + private byte[] myContents; + + @Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH) + private String myTenantName; + + public BulkImportJobEntity getJob() { + return myJob; + } + + public void setJob(BulkImportJobEntity theJob) { + myJob = theJob; + } + + public int getFileSequence() { + return myFileSequence; + } + + public void setFileSequence(int theFileSequence) { + myFileSequence = theFileSequence; + } + + public String getContents() { + return new String(myContents, StandardCharsets.UTF_8); + } + + public void setContents(String theContents) { + myContents = 
theContents.getBytes(StandardCharsets.UTF_8); + } + + + public BulkImportJobFileJson toJson() { + return new BulkImportJobFileJson() + .setContents(getContents()) + .setTenantName(getTenantName()); + } + + public void setTenantName(String theTenantName) { + myTenantName = theTenantName; + } + + public String getTenantName() { + return myTenantName; + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java new file mode 100644 index 00000000000..f3e6a6a130d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java @@ -0,0 +1,58 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.fail; + +public class BaseBatchJobR4Test extends BaseJpaR4Test { + + private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class); + @Autowired + private JobExplorer myJobExplorer; + + protected List awaitAllBulkJobCompletions(String... 
theJobNames) { + assert theJobNames.length > 0; + + List bulkExport = new ArrayList<>(); + for (String nextName : theJobNames) { + bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100)); + } + if (bulkExport.isEmpty()) { + List wantNames = Arrays.asList(theJobNames); + List haveNames = myJobExplorer.getJobNames(); + fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames); + } + List bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList()); + awaitJobCompletions(bulkExportExecutions); + + return bulkExportExecutions; + } + + protected void awaitJobCompletions(Collection theJobs) { + theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution)); + } + + protected void awaitJobCompletion(JobExecution theJobExecution) { + await().atMost(120, TimeUnit.SECONDS).until(() -> { + JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); + ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions()); + return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED; + }); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index ede41a213e8..2c216b9074d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import 
ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.client.apache.ResourceEntity; @@ -188,7 +188,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.BUILDING) + .setStatus(BulkExportJobStatusEnum.BUILDING) .setStatusTime(InstantType.now().getValue()); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); @@ -212,7 +212,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.ERROR) + .setStatus(BulkExportJobStatusEnum.ERROR) .setStatusTime(InstantType.now().getValue()) .setStatusMessage("Some Error Message"); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); @@ -239,7 +239,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.COMPLETE) + .setStatus(BulkExportJobStatusEnum.COMPLETE) .setStatusTime(InstantType.now().getValue()); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111")); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222")); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 58fd8a39f5b..fdc92d090e9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -6,15 +6,14 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder; -import ca.uhn.fhir.jpa.bulk.job.GroupBulkExportJobParametersBuilder; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobParametersBuilder; +import ca.uhn.fhir.jpa.bulk.export.job.GroupBulkExportJobParametersBuilder; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; -import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; @@ -46,28 +45,22 @@ import org.hl7.fhir.r4.model.Reference; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; import org.springframework.batch.core.JobParametersBuilder; import 
org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.explore.JobExplorer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import java.util.Arrays; -import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -78,7 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { +public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { public static final String TEST_FILTER = "Patient?gender=female"; private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class); @@ -92,8 +85,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private IBulkDataExportSvc myBulkDataExportSvc; @Autowired private IBatchJobSubmitter myBatchJobSubmitter; - @Autowired - private JobExplorer myJobExplorer; @Autowired @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME) @@ -128,7 +119,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue(); BulkExportJobEntity job = new BulkExportJobEntity(); - job.setStatus(BulkJobStatusEnum.COMPLETE); + job.setStatus(BulkExportJobStatusEnum.COMPLETE); job.setExpiry(DateUtils.addHours(new Date(), -1)); job.setJobId(UUID.randomUUID().toString()); job.setCreated(new Date()); @@ -241,6 +232,7 @@ public class 
BulkDataExportSvcImplR4Test extends BaseJpaR4Test { options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); return options; } + @Test public void testSubmit_ReusesExisting() { @@ -278,7 +270,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); // Run a scheduled pass to build the export myBulkDataExportSvc.buildExportFiles(); @@ -287,7 +279,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.ERROR, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.ERROR, status.getStatus()); assertThat(status.getStatusMessage(), containsString("help i'm a bug")); } finally { @@ -295,6 +287,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } } + private void awaitAllBulkJobCompletions() { + awaitAllBulkJobCompletions( + BatchJobsConfig.BULK_EXPORT_JOB_NAME, + BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, + BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME + ); + } + @Test public void testGenerateBulkExport_SpecificResources() { @@ -313,7 +313,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_FILTER), status.getRequest()); // Run a scheduled pass to build the export @@ -323,7 +323,7 @@ public 
class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); // Iterate over the files for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { @@ -368,7 +368,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson", status.getRequest()); // Run a scheduled pass to build the export @@ -378,7 +378,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(5, status.getFiles().size()); // Iterate over the files @@ -393,7 +393,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } else if ("Observation".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); assertEquals(26, nextContents.split("\n").length); - }else if ("Immunization".equals(next.getResourceType())) { + } else if ("Immunization".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n")); assertEquals(26, nextContents.split("\n").length); } else if ("CareTeam".equals(next.getResourceType())) { @@ -428,7 +428,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo 
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(5)); } @@ -451,7 +451,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient&_typeFilter=Patient%3F_has%3AObservation%3Apatient%3Aidentifier%3DSYS%7CVAL3", status.getRequest()); // Run a scheduled pass to build the export @@ -461,7 +461,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(1, status.getFiles().size()); // Iterate over the files @@ -481,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testGenerateBulkExport_WithSince() throws InterruptedException { + public void testGenerateBulkExport_WithSince() { // Create some resources to load createResources(); @@ -508,7 +508,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + 
cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest()); // Run a scheduled pass to build the export @@ -518,7 +518,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(1, status.getFiles().size()); // Iterate over the files @@ -560,24 +560,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID"); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); } - public void awaitAllBulkJobCompletions() { - List bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100); - bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, 0, 100)); - bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100)); - if (bulkExport.isEmpty()) { - fail("There are no bulk export jobs running!"); - } - List bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList()); - awaitJobCompletions(bulkExportExecutions); - } - - public void awaitJobCompletions(Collection theJobs) { - theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution)); - } @Test public void testBatchJobSubmitsAndRuns() throws Exception { @@ -599,13 +585,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo 
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); } @Test - public void testGroupBatchJobWorks() throws Exception { + public void testGroupBatchJobWorks() { createResources(); // Create a bulk job @@ -625,7 +611,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -639,8 +625,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(nextContents, is(containsString("IMM6"))); assertThat(nextContents, is(containsString("IMM8"))); } + @Test - public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() throws Exception { + public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() { createResources(); // Create a bulk job @@ -659,7 +646,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -716,7 +703,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo jobInfo = 
myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -747,7 +734,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null @Test - public void testGroupBatchJobCareTeam() throws Exception { + public void testGroupBatchJobCareTeam() { createResources(); BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); @@ -766,7 +753,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam"))); @@ -810,7 +797,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -847,7 +834,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - 
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation"))); String nextContents = getBinaryContents(jobInfo, 0); @@ -888,7 +875,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient"))); @@ -900,7 +887,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testMdmExpansionWorksForGroupExportOnMatchedPatients() throws JobParametersInvalidException { + public void testMdmExpansionWorksForGroupExportOnMatchedPatients() { createResources(); // Create a bulk job @@ -918,9 +905,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest()); + assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId + "&_mdm=true", jobInfo.getRequest()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), 
is(equalTo("Immunization"))); @@ -963,7 +950,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testGroupBulkExportSupportsTypeFilters() throws JobParametersInvalidException { + public void testGroupBulkExportSupportsTypeFilters() { createResources(); //Only get COVID-19 vaccinations @@ -985,7 +972,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -1021,7 +1008,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); //Group-style bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); @@ -1030,7 +1017,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); //System-style bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); @@ -1038,7 +1025,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); jobInfo 
= myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); } @Test @@ -1077,14 +1064,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } - private void awaitJobCompletion(JobExecution theJobExecution) { - await().atMost(120, TimeUnit.SECONDS).until(() -> { - JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); - ourLog.info("JobExecution {} currently has status: {}", theJobExecution.getId(), jobExecution.getStatus()); - return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED; - }); - } - private void createResources() { Group group = new Group(); group.setId("G0"); @@ -1109,7 +1088,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { linkToGoldenResource(goldenPid, sourcePid); //Only add half the patients to the group. - if (i % 2 == 0 ) { + if (i % 2 == 0) { group.addMember().setEntity(new Reference(patId)); } @@ -1119,7 +1098,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createCareTeamWithIndex(i, patId); } - myPatientGroupId = myGroupDao.update(group).getId(); + myPatientGroupId = myGroupDao.update(group).getId(); //Manually create another golden record Patient goldenPatient2 = new Patient(); @@ -1153,8 +1132,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { patient.setGender(i % 2 == 0 ? 
Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE); patient.addName().setFamily("FAM" + i); patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); - DaoMethodOutcome patientOutcome = myPatientDao.update(patient); - return patientOutcome; + return myPatientDao.update(patient); } private void createCareTeamWithIndex(int i, IIdType patId) { @@ -1167,7 +1145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private void createImmunizationWithIndex(int i, IIdType patId) { Immunization immunization = new Immunization(); immunization.setId("IMM" + i); - if (patId != null ) { + if (patId != null) { immunization.setPatient(new Reference(patId)); } if (i % 2 == 0) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java new file mode 100644 index 00000000000..dcee246154c --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java @@ -0,0 +1,155 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.bulk.BaseBatchJobR4Test; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import 
ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.test.utilities.ITestDataBuilder; +import ca.uhn.fhir.util.BundleBuilder; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.springframework.batch.core.JobExecution; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nonnull; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDataBuilder { + + @Autowired + private IBulkDataImportSvc mySvc; + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private IBulkImportJobFileDao myBulkImportJobFileDao; + + @AfterEach + public void after() { + myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof IAnonymousInterceptor); + } + + @Test + public void testFlow_TransactionRows() { + int transactionsPerFile = 10; + int fileCount = 10; + List files = createInputFiles(transactionsPerFile, fileCount); + + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setJobDescription("This is the description"); + job.setBatchSize(3); + String jobId = mySvc.createNewJob(job, files); + mySvc.markJobAsReadyForActivation(jobId); + 
+ boolean activateJobOutcome = mySvc.activateNextReadyJob(); + assertTrue(activateJobOutcome); + + List executions = awaitAllBulkJobCompletions(); + assertEquals(1, executions.size()); + assertEquals("This is the description", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION)); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(0, jobs.size()); + + List jobFiles = myBulkImportJobFileDao.findAll(); + assertEquals(0, jobFiles.size()); + + }); + + IBundleProvider searchResults = myPatientDao.search(SearchParameterMap.newSynchronous()); + assertEquals(transactionsPerFile * fileCount, searchResults.sizeOrThrowNpe()); + + } + + @Test + public void testFlow_WithTenantNamesInInput() { + int transactionsPerFile = 5; + int fileCount = 10; + List files = createInputFiles(transactionsPerFile, fileCount); + for (int i = 0; i < fileCount; i++) { + files.get(i).setTenantName("TENANT" + i); + } + + IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class); + myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, interceptor); + + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(5); + String jobId = mySvc.createNewJob(job, files); + mySvc.markJobAsReadyForActivation(jobId); + + boolean activateJobOutcome = mySvc.activateNextReadyJob(); + assertTrue(activateJobOutcome); + + awaitAllBulkJobCompletions(); + + ArgumentCaptor paramsCaptor = ArgumentCaptor.forClass(HookParams.class); + verify(interceptor, times(50)).invoke(any(), paramsCaptor.capture()); + List tenantNames = paramsCaptor + .getAllValues() + .stream() + .map(t -> t.get(RequestDetails.class).getTenantId()) + .distinct() + .sorted() + .collect(Collectors.toList()); + assertThat(tenantNames, containsInAnyOrder( + "TENANT0", "TENANT1", "TENANT2", "TENANT3", "TENANT4", "TENANT5", "TENANT6", "TENANT7", "TENANT8", 
"TENANT9" + )); + } + + + @Nonnull + private List createInputFiles(int transactionsPerFile, int fileCount) { + List files = new ArrayList<>(); + for (int fileIndex = 0; fileIndex < fileCount; fileIndex++) { + StringBuilder fileContents = new StringBuilder(); + + for (int transactionIdx = 0; transactionIdx < transactionsPerFile; transactionIdx++) { + BundleBuilder bundleBuilder = new BundleBuilder(myFhirCtx); + IBaseResource patient = buildPatient(withFamily("FAM " + fileIndex + " " + transactionIdx)); + bundleBuilder.addTransactionCreateEntry(patient); + fileContents.append(myFhirCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(bundleBuilder.getBundle())); + fileContents.append("\n"); + } + + BulkImportJobFileJson nextFile = new BulkImportJobFileJson(); + nextFile.setContents(fileContents.toString()); + files.add(nextFile); + } + return files; + } + + protected List awaitAllBulkJobCompletions() { + return awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java new file mode 100644 index 00000000000..5bc80f28024 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java @@ -0,0 +1,145 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import 
ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; +import com.google.common.collect.Lists; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.blankString; +import static org.hamcrest.Matchers.not; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class BulkDataImportSvcImplTest extends BaseJpaR4Test { + + @Autowired + private IBulkDataImportSvc mySvc; + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private IBulkImportJobFileDao myBulkImportJobFileDao; + + @Test + public void testCreateNewJob() { + + // Create job + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + file1.setContents("contents 1"); + BulkImportJobFileJson file2 = new BulkImportJobFileJson(); + file2.setContents("contents 2"); + String jobId = mySvc.createNewJob(job, Lists.newArrayList(file1, file2)); + assertThat(jobId, not(blankString())); + + // Add file + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + mySvc.addFilesToJob(jobId, Lists.newArrayList(file3)); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(1, jobs.size()); + assertEquals(jobId, jobs.get(0).getJobId()); + assertEquals(3, jobs.get(0).getFileCount()); + assertEquals(BulkImportJobStatusEnum.STAGING, jobs.get(0).getStatus()); + + List files = myBulkImportJobFileDao.findAllForJob(jobId); + assertEquals(3, files.size()); + + }); + } + + @Test + public void testCreateNewJob_InvalidJob_NoContents() { + BulkImportJobJson job = new BulkImportJobJson(); + 
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + try { + mySvc.createNewJob(job, Lists.newArrayList(file1)); + } catch (UnprocessableEntityException e) { + assertEquals("Job File Contents mode must not be null", e.getMessage()); + } + } + + @Test + public void testCreateNewJob_InvalidJob_NoProcessingMode() { + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + file1.setContents("contents 1"); + try { + mySvc.createNewJob(job, Lists.newArrayList(file1)); + } catch (UnprocessableEntityException e) { + assertEquals("Job File Processing mode must not be null", e.getMessage()); + } + } + + @Test + public void testAddFilesToJob_InvalidId() { + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + try { + mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3)); + } catch (InvalidRequestException e) { + assertEquals("Unknown job ID: ABCDEFG", e.getMessage()); + } + } + + @Test + public void testAddFilesToJob_WrongStatus() { + runInTransaction(() -> { + BulkImportJobEntity entity = new BulkImportJobEntity(); + entity.setFileCount(1); + entity.setJobId("ABCDEFG"); + entity.setStatus(BulkImportJobStatusEnum.RUNNING); + entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + myBulkImportJobDao.save(entity); + }); + + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + try { + mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3)); + } catch (InvalidRequestException e) { + assertEquals("Job ABCDEFG has status RUNNING and can not be added to", e.getMessage()); + } + } + + @Test + public void testActivateJob() { + runInTransaction(() -> { + BulkImportJobEntity entity = new BulkImportJobEntity(); + 
entity.setFileCount(1); + entity.setJobId("ABCDEFG"); + entity.setStatus(BulkImportJobStatusEnum.STAGING); + entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + myBulkImportJobDao.save(entity); + }); + + mySvc.markJobAsReadyForActivation("ABCDEFG"); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(1, jobs.size()); + assertEquals(BulkImportJobStatusEnum.READY, jobs.get(0).getStatus()); + }); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index 8f22d6dda97..83c339af0bf 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.BaseConfig; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermConcept; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java index 63f61b740b1..301f5476e7a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java @@ -85,7 +85,7 @@ public class TransactionProcessorTest { .setUrl("/MedicationKnowledge"); try { - myTransactionProcessor.transaction(null, input); + myTransactionProcessor.transaction(null, input, false); fail(); } catch (InvalidRequestException e) { assertEquals("Resource 
MedicationKnowledge is not supported on this server. Supported resource types: []", e.getMessage()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java index 483e0874f02..5992f9df117 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java @@ -8,7 +8,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSubscription; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu2Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java index 9cd366f3b1e..e4803f648d5 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java @@ -785,7 +785,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest { // try { Bundle resp = mySystemDao.transaction(mySrd, request); assertEquals(1, resp.getEntry().size()); - assertEquals("404 Not Found", resp.getEntry().get(0).getResponse().getStatus()); + assertEquals("204 No Content", resp.getEntry().get(0).getResponse().getStatus()); // fail(); // } catch (ResourceNotFoundException e) { @@ -1159,11 +1159,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest { } assertEquals("201 Created", 
resp.getEntry().get(2).getResponse().getStatus()); assertThat(resp.getEntry().get(2).getResponse().getLocation(), startsWith("Patient/")); - if (pass == 0) { - assertEquals("404 Not Found", resp.getEntry().get(3).getResponse().getStatus()); - } else { - assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus()); - } + assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus()); Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource(); assertEquals(1, respGetBundle.getEntry().size()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java index 109a59cdad0..0953336ff66 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java @@ -13,7 +13,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu3Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index bf257e1f5ed..9cb19833adf 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -17,7 +17,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import 
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; @@ -51,24 +51,19 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; -import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; -import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; -import ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderObservationR4; import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4; -import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; @@ -77,7 +72,6 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import 
ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; @@ -95,11 +89,9 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.util.ClasspathUtil; -import ca.uhn.fhir.util.ResourceUtil; import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; -import org.apache.commons.io.IOUtils; import org.hibernate.search.mapper.orm.Search; import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; @@ -168,7 +160,6 @@ import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r5.utils.IResourceValidator; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; @@ -182,7 +173,6 @@ import org.springframework.transaction.PlatformTransactionManager; import javax.persistence.EntityManager; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java index 7e71ceab8ad..79904f2d15f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java @@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java index 4f0eb913e03..d01756a7265 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java @@ -7,7 +7,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java index c5ba2df8890..c4b556bc6ed 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index 24951ce3060..a7d4f7a5278 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -18,6 +18,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; @@ -60,10 +61,10 @@ import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.Resource; import org.hl7.fhir.r4.model.ValueSet; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import 
org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; @@ -109,8 +110,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { public void after() { myDaoConfig.setAllowInlineMatchUrlReferences(false); myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete()); - myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); - } + myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); + } @BeforeEach public void beforeDisableResultReuse() { @@ -549,7 +550,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myValueSetDao.create(vs, mySrd); sleepUntilTimeChanges(); - + ResourceTable entity = new TransactionTemplate(myTxManager).execute(t -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong())); assertEquals(Long.valueOf(1), entity.getIndexStatus()); @@ -568,9 +569,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { * so it indexes the newest resource one more time. It wouldn't be a big deal * if this ever got fixed so that it ends up with 2 instead of 3. 
*/ - runInTransaction(()->{ + runInTransaction(() -> { Optional reindexCount = myResourceReindexJobDao.getReindexCount(jobId); - assertEquals(3, reindexCount.orElseThrow(()->new NullPointerException("No job " + jobId)).intValue()); + assertEquals(3, reindexCount.orElseThrow(() -> new NullPointerException("No job " + jobId)).intValue()); }); // Try making the resource unparseable @@ -626,7 +627,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3); assertNotNull(historyEntry); myResourceHistoryTableDao.delete(historyEntry); @@ -656,7 +657,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { myEntityManager .createQuery("UPDATE ResourceIndexedSearchParamString s SET s.myHashNormalizedPrefix = 0") .executeUpdate(); @@ -671,7 +672,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myResourceReindexingSvc.markAllResourcesForReindexing(); myResourceReindexingSvc.forceReindexingPass(); - runInTransaction(()->{ + runInTransaction(() -> { ResourceIndexedSearchParamString param = myResourceIndexedSearchParamStringDao.findAll() .stream() .filter(t -> t.getParamName().equals("family")) @@ -694,7 +695,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { Long i = myEntityManager .createQuery("SELECT count(s) FROM 
ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class) .getSingleResult(); @@ -714,7 +715,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myResourceReindexingSvc.markAllResourcesForReindexing(); myResourceReindexingSvc.forceReindexingPass(); - runInTransaction(()->{ + runInTransaction(() -> { Long i = myEntityManager .createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class) .getSingleResult(); @@ -808,6 +809,30 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus()); } + + @Test + public void testNestedTransaction_ReadsBlocked() { + String methodName = "testTransactionBatchWithFailingRead"; + Bundle request = new Bundle(); + request.setType(BundleType.TRANSACTION); + + Patient p = new Patient(); + p.addName().setFamily(methodName); + request.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST); + + request.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient?identifier=foo"); + + try { + runInTransaction(()->{ + mySystemDao.transactionNested(mySrd, request); + }); + fail(); + } catch (InvalidRequestException e) { + assertEquals("Can not invoke read operation on nested transaction", e.getMessage()); + } + } + + @Test public void testTransactionBatchWithFailingRead() { String methodName = "testTransactionBatchWithFailingRead"; @@ -923,8 +948,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { Bundle outcome = mySystemDao.transaction(mySrd, request); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); - assertEquals("Missing required resource in Bundle.entry[0].resource for operation 
POST", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); + assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); + assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); validate(outcome); } @@ -942,8 +967,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { Bundle outcome = mySystemDao.transaction(mySrd, request); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); - assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); + assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); + assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); validate(outcome); } @@ -2272,7 +2297,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { patient2.addIdentifier().setSystem("urn:system").setValue("testPersistWithSimpleLinkP02"); request.addEntry().setResource(patient2).getRequest().setMethod(HTTPVerb.POST); - assertThrows(InvalidRequestException.class, ()->{ + assertThrows(InvalidRequestException.class, () -> { mySystemDao.transaction(mySrd, request); }); } @@ -3198,9 +3223,9 @@ public class FhirSystemDaoR4Test extends 
BaseJpaR4SystemTest { assertEquals("1", id2.getVersionIdPart()); assertEquals(id.getValue(), id2.getValue()); - + } - + @Test public void testTransactionWithIfMatch() { Patient p = new Patient(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 8d40288bc2b..b9192bf7b0f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -16,7 +16,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR5Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java index 7680e88baec..66ac0d415cb 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import 
ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index b4436c319ec..a08e196fe26 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java new file mode 100644 index 00000000000..4224e215332 --- /dev/null +++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java @@ -0,0 +1,32 @@ +package ca.uhn.fhir.jpa.batch; + +/*- + * #%L + * HAPI FHIR JPA Server - Batch Task Processor + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class BatchConstants { + + /** + * Non instantiable + */ + private BatchConstants() {} + + public static final String JOB_LAUNCHING_TASK_EXECUTOR = "jobLaunchingTaskExecutor"; + +} diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java index 936eb9d12ab..27eb2518893 100644 --- a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java +++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch.config; * #L% */ +import ca.uhn.fhir.jpa.batch.BatchConstants; import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer; import org.springframework.batch.core.explore.JobExplorer; import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; @@ -39,7 +40,7 @@ public class NonPersistedBatchConfigurer extends DefaultBatchConfigurer { private PlatformTransactionManager myHapiPlatformTransactionManager; @Autowired - @Qualifier("jobLaunchingTaskExecutor") + @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR) private TaskExecutor myTaskExecutor; private MapJobRepositoryFactoryBean myJobRepositoryFactory; diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml index 440cd4e30e4..20069c8fa7e 100644 --- a/hapi-fhir-jpaserver-cql/pom.xml +++ b/hapi-fhir-jpaserver-cql/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -144,13 +144,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml 
index 236fc33d3ab..3295a49c94a 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -55,13 +55,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml index ab2347ac99c..68316dcf0db 100644 --- a/hapi-fhir-jpaserver-migrate/pom.xml +++ b/hapi-fhir-jpaserver-migrate/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 4e14b8594d7..e3975350931 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks; */ import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask; import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask; @@ -91,6 +92,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.1", "INDEX_STATUS").nullable().type(ColumnTypeEnum.LONG); version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB); version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.3", 
"SOURCE_PID").nullable().type(ColumnTypeEnum.LONG); + + // Bulk Import Job + Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID"); + blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH); + blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); + blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); + blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500); + blkImportJobTable.addColumn("OPTLOCK").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addColumn("FILE_COUNT").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addColumn("ROW_PROCESSING_MODE").nonNullable().type(ColumnTypeEnum.STRING, 20); + blkImportJobTable.addColumn("BATCH_SIZE").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addIndex("20210410.2", "IDX_BLKIM_JOB_ID").unique(true).withColumns("JOB_ID"); + version.addIdGenerator("20210410.3", "SEQ_BLKIMJOB_PID"); + + // Bulk Import Job File + Builder.BuilderAddTableByColumns blkImportJobFileTable = version.addTableByColumns("20210410.4", "HFJ_BLK_IMPORT_JOBFILE", "PID"); + blkImportJobFileTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobFileTable.addColumn("JOB_PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobFileTable.addColumn("JOB_CONTENTS").nonNullable().type(ColumnTypeEnum.BLOB); + blkImportJobFileTable.addColumn("FILE_SEQ").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobFileTable.addColumn("TENANT_NAME").nullable().type(ColumnTypeEnum.STRING, 200); + blkImportJobFileTable.addIndex("20210410.5", "IDX_BLKIM_JOBFILE_JOBID").unique(false).withColumns("JOB_PID"); + blkImportJobFileTable.addForeignKey("20210410.6", 
"FK_BLKIMJOBFILE_JOB").toColumn("JOB_PID").references("HFJ_BLK_IMPORT_JOB", "PID"); + version.addIdGenerator("20210410.7", "SEQ_BLKIMJOBFILE_PID"); + } private void init530() { diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 8c915ff27c6..b46518bed86 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 0cccf93a45b..5dc3b7d17f4 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 633843691b3..fc862aea074 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index e00e23537da..b66f8e9615e 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index ffbe7a1164c..183e9a6b602 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml @@ -164,7 +164,7 @@ ca.uhn.hapi.fhir hapi-fhir-converter - 5.4.0-PRE5-SNAPSHOT + 
5.4.0-PRE6-SNAPSHOT diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index e2c52f3b321..a7e2c06d36a 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.provider.DiffProvider; import ca.uhn.fhir.jpa.provider.GraphQLProvider; diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 6f7664a8f48..adc4be9c0ab 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 2524d2960e8..1e71fc77421 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 58483804fd3..d0905db3a66 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index bb0460f5f2c..919e895664c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index ec97e7f6b0f..622de27723a 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index b20cdc12fac..ad0f6ed0cfb 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 1643433e6ef..02e9d448d79 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 04d4fe39150..c210826be2c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index b3490ba702b..38c1789454d 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index d80fff6935b..c8054db0e2b 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 412d69d642c..6659bcd46c2 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java index 
d1acfeb7369..0bb1e0e3f31 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java @@ -1,7 +1,5 @@ package ca.uhn.fhir.narrative; -import ca.uhn.fhir.util.TestUtil; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -9,60 +7,54 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class BaseThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseThymeleafNarrativeGeneratorDstu2Test.class); - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } - - @Test public void testTrimWhitespace() { //@formatter:off - String input = "

    \n" + - "
    \n" + - " \n" + - " joe \n" + - " john \n" + - " BLOW \n" + - " \n" + - "
    \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - "
    Identifier123456
    Address\n" + - " \n" + - " 123 Fake Street
    \n" + - " \n" + - " \n" + - " Unit 1
    \n" + - " \n" + - " Toronto\n" + - " ON\n" + - " Canada\n" + - "
    Date of birth\n" + - " 31 March 2014\n" + - "
    \n" + - "
    "; + String input = "
    \n" + + "
    \n" + + " \n" + + " joe \n" + + " john \n" + + " BLOW \n" + + " \n" + + "
    \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + "
    Identifier123456
    Address\n" + + " \n" + + " 123 Fake Street
    \n" + + " \n" + + " \n" + + " Unit 1
    \n" + + " \n" + + " Toronto\n" + + " ON\n" + + " Canada\n" + + "
    Date of birth\n" + + " 31 March 2014\n" + + "
    \n" + + "
    "; //@formatter:on String actual = BaseThymeleafNarrativeGenerator.cleanWhitespace(input); String expected = "
    joe john BLOW
    Identifier123456
    Address123 Fake Street
    Unit 1
    TorontoONCanada
    Date of birth31 March 2014
    "; - + ourLog.info(actual); - + assertEquals(expected, actual); } diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java index c6709a2128b..a3742c8ea60 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java @@ -1,9 +1,9 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.model.dstu2.resource.Practitioner; -import ca.uhn.fhir.util.TestUtil; -import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -13,20 +13,19 @@ public class CustomThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorDstu2Test.class); - private static FhirContext ourCtx = FhirContext.forDstu2(); + private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2); - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); } - @Test public void testGenerator() { // CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("file:src/test/resources/narrative/customnarrative.properties"); CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customnarrative_dstu2.properties"); - ourCtx.setNarrativeGenerator(gen); + myCtx.setNarrativeGenerator(gen); Practitioner p = new Practitioner(); p.addIdentifier().setSystem("sys").setValue("val1"); @@ -34,7 +33,7 @@ 
public class CustomThymeleafNarrativeGeneratorDstu2Test { p.addAddress().addLine("line1").addLine("line2"); p.getName().addFamily("fam1").addGiven("given"); - gen.populateResourceNarrative(ourCtx, p); + gen.populateResourceNarrative(myCtx, p); String actual = p.getText().getDiv().getValueAsString(); ourLog.info(actual); diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java index 4b5015d9f5b..58beb7f4a0b 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; import ca.uhn.fhir.model.dstu2.composite.QuantityDt; @@ -22,6 +23,7 @@ import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.TestUtil; import org.hamcrest.core.StringContains; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -35,7 +37,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu2Test.class); - private static FhirContext ourCtx = FhirContext.forDstu2(); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -43,9 +45,15 @@ public class 
DefaultThymeleafNarrativeGeneratorDstu2Test { myGen = new DefaultThymeleafNarrativeGenerator(); myGen.setUseHapiServerConformanceNarrative(true); - ourCtx.setNarrativeGenerator(myGen); + myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); @@ -57,7 +65,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { value.setBirthDate(new Date(), TemporalPrecisionEnum.DAY); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("
    joe john BLOW
    ")); @@ -69,7 +77,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { Parameters value = new Parameters(); value.setId("123"); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, not(containsString("narrative"))); @@ -89,9 +97,9 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { " \n" + ""; - OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse); + OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse); - myGen.populateResourceNarrative(ourCtx, oo); + myGen.populateResourceNarrative(myCtx, oo); String output = oo.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -129,7 +137,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { value.addResult().setResource(obs); } - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -137,7 +145,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { // Now try it with the parser - output = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value); + output = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value); ourLog.info(output); assertThat(output, StringContains.containsString("
    Some & Diagnostic Report
    ")); } @@ -154,7 +162,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { mp.setStatus(MedicationOrderStatusEnum.ACTIVE); mp.setDateWritten(new DateTimeDt("2014-09-01")); - myGen.populateResourceNarrative(ourCtx, mp); + myGen.populateResourceNarrative(myCtx, mp); String output = mp.getText().getDiv().getValueAsString(); assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output); @@ -167,7 +175,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { Medication med = new Medication(); med.getCode().setText("ciproflaxin"); - myGen.populateResourceNarrative(ourCtx, med); + myGen.populateResourceNarrative(myCtx, med); String output = med.getText().getDiv().getValueAsString(); assertThat(output, containsString("ciproflaxin")); diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 24f6bfd53d9..04fc5a8cbc3 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java index 9d99461d525..c5dceef79ac 100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.collections.Transformer; @@ -11,6 +12,7 @@ import 
org.hl7.fhir.dstu3.model.DiagnosticReport.DiagnosticReportStatus; import org.hl7.fhir.dstu3.model.MedicationRequest.MedicationRequestStatus; import org.hl7.fhir.dstu3.model.Observation.ObservationStatus; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -28,7 +30,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorDstu3Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu3Test.class); - private static FhirContext ourCtx = FhirContext.forDstu3(); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU3); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -36,9 +38,15 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { myGen = new DefaultThymeleafNarrativeGenerator(); myGen.setUseHapiServerConformanceNarrative(true); - ourCtx.setNarrativeGenerator(myGen); + myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); @@ -51,7 +59,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.setBirthDate(new Date()); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("
    joe john BLOW
    ")); @@ -95,7 +103,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { } }); - customGen.populateResourceNarrative(ourCtx, value); + customGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("Some beautiful proze")); @@ -111,7 +119,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.addResult().setReference("Observation/2"); value.addResult().setReference("Observation/3"); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -133,13 +141,13 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { ""; //@formatter:on - OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse); + OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse); // String output = gen.generateTitle(oo); // ourLog.info(output); // assertEquals("Operation Outcome (2 issues)", output); - myGen.populateResourceNarrative(ourCtx, oo); + myGen.populateResourceNarrative(myCtx, oo); String output = oo.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -177,7 +185,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.addResult().setResource(obs); } - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -240,8 +248,8 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { " }"; - DiagnosticReport value = ourCtx.newJsonParser().parseResource(DiagnosticReport.class, input); - myGen.populateResourceNarrative(ourCtx, value); + DiagnosticReport value = myCtx.newJsonParser().parseResource(DiagnosticReport.class, input); + myGen.populateResourceNarrative(myCtx, value); String output = 
value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -261,7 +269,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { mp.setStatus(MedicationRequestStatus.ACTIVE); mp.setAuthoredOnElement(new DateTimeType("2014-09-01")); - myGen.populateResourceNarrative(ourCtx, mp); + myGen.populateResourceNarrative(myCtx, mp); String output = mp.getText().getDiv().getValueAsString(); assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output); @@ -274,7 +282,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { Medication med = new Medication(); med.getCode().setText("ciproflaxin"); - myGen.populateResourceNarrative(ourCtx, med); + myGen.populateResourceNarrative(myCtx, med); String output = med.getText().getDiv().getValueAsString(); assertThat(output, containsString("ciproflaxin")); diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 035dabec631..8383bdda70a 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 67dd6dfeb59..e0793555c76 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java index 6b355c7332e..957551d03c5 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java +++ 
b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java @@ -1,11 +1,13 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.r4.model.Practitioner; import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.StringType; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,86 +16,89 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class CustomThymeleafNarrativeGeneratorR4Test { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class); - /** Don't use cached here since we modify the context */ - private FhirContext myCtx = FhirContext.forR4(); + /** + * Don't use cached here since we modify the context + */ + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); - /** - * Implement narrative for standard type - */ - @Test - public void testStandardType() { + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } - CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties"); - myCtx.setNarrativeGenerator(gen); + /** + * Implement narrative for standard type + */ + @Test + public void testStandardType() { - Practitioner p = new Practitioner(); - p.addIdentifier().setSystem("sys").setValue("val1"); - p.addIdentifier().setSystem("sys").setValue("val2"); - p.addAddress().addLine("line1").addLine("line2"); - p.addName().setFamily("fam1").addGiven("given"); + CustomThymeleafNarrativeGenerator gen = new 
CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); - gen.populateResourceNarrative(myCtx, p); + Practitioner p = new Practitioner(); + p.addIdentifier().setSystem("sys").setValue("val1"); + p.addIdentifier().setSystem("sys").setValue("val2"); + p.addAddress().addLine("line1").addLine("line2"); + p.addName().setFamily("fam1").addGiven("given"); - String actual = p.getText().getDiv().getValueAsString(); - ourLog.info(actual); + gen.populateResourceNarrative(myCtx, p); - assertThat(actual, containsString("

    Name

    given FAM1

    Address

    line1
    line2
    ")); + String actual = p.getText().getDiv().getValueAsString(); + ourLog.info(actual); - } + assertThat(actual, containsString("

    Name

    given FAM1

    Address

    line1
    line2
    ")); - @Test - public void testCustomType() { + } - CustomPatient patient = new CustomPatient(); - patient.setActive(true); - FavouritePizzaExtension parentExtension = new FavouritePizzaExtension(); - parentExtension.setToppings(new StringType("Mushrooms, Onions")); - parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches")); - patient.setFavouritePizza(parentExtension); + @Test + public void testCustomType() { - String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); - ourLog.info("Encoded: {}", output); + CustomPatient patient = new CustomPatient(); + patient.setActive(true); + FavouritePizzaExtension parentExtension = new FavouritePizzaExtension(); + parentExtension.setToppings(new StringType("Mushrooms, Onions")); + parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches")); + patient.setFavouritePizza(parentExtension); - String expectedEncoding = "{\n" + - " \"resourceType\": \"Patient\",\n" + - " \"meta\": {\n" + - " \"profile\": [ \"http://custom_patient\" ]\n" + - " },\n" + - " \"extension\": [ {\n" + - " \"url\": \"http://example.com/favourite_pizza\",\n" + - " \"extension\": [ {\n" + - " \"url\": \"toppings\",\n" + - " \"valueString\": \"Mushrooms, Onions\"\n" + - " }, {\n" + - " \"url\": \"size\",\n" + - " \"valueQuantity\": {\n" + - " \"value\": 14,\n" + - " \"unit\": \"Inches\",\n" + - " \"system\": \"http://unitsofmeasure\",\n" + - " \"code\": \"[in_i]\"\n" + - " }\n" + - " } ]\n" + - " } ],\n" + - " \"active\": true\n" + - "}"; - assertEquals(expectedEncoding, output); + String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); + ourLog.info("Encoded: {}", output); - CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties"); - myCtx.setNarrativeGenerator(gen); - gen.populateResourceNarrative(myCtx, patient); + String expectedEncoding = "{\n" 
+ + " \"resourceType\": \"Patient\",\n" + + " \"meta\": {\n" + + " \"profile\": [ \"http://custom_patient\" ]\n" + + " },\n" + + " \"extension\": [ {\n" + + " \"url\": \"http://example.com/favourite_pizza\",\n" + + " \"extension\": [ {\n" + + " \"url\": \"toppings\",\n" + + " \"valueString\": \"Mushrooms, Onions\"\n" + + " }, {\n" + + " \"url\": \"size\",\n" + + " \"valueQuantity\": {\n" + + " \"value\": 14,\n" + + " \"unit\": \"Inches\",\n" + + " \"system\": \"http://unitsofmeasure\",\n" + + " \"code\": \"[in_i]\"\n" + + " }\n" + + " } ]\n" + + " } ],\n" + + " \"active\": true\n" + + "}"; + assertEquals(expectedEncoding, output); - String actual = patient.getText().getDiv().getValueAsString(); - ourLog.info(actual); + CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); + gen.populateResourceNarrative(myCtx, patient); - String expected = "

    CustomPatient

    Favourite Pizza

    Toppings: Mushrooms, Onions Size: 14
    "; - assertEquals(expected, actual); + String actual = patient.getText().getDiv().getValueAsString(); + ourLog.info(actual); - } + String expected = "

    CustomPatient

    Favourite Pizza

    Toppings: Mushrooms, Onions Size: 14
    "; + assertEquals(expected, actual); + + } - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } } diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java index 433b448a448..4847d922123 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java @@ -10,6 +10,7 @@ import org.hl7.fhir.r4.model.DiagnosticReport.DiagnosticReportStatus; import org.hl7.fhir.r4.model.MedicationRequest.MedicationRequestStatus; import org.hl7.fhir.r4.model.Observation.ObservationStatus; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -22,7 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorR4Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorR4Test.class); - private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -33,6 +34,11 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 8760ce684e3..f564ed044b8 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ 
b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 654c33c28fe..ac4521d10e5 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java index ee503727c3e..61563e7ccb4 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java @@ -154,7 +154,7 @@ public interface ITestDataBuilder { } } - default IBaseResource buildResource(String theResourceType, Consumer[] theModifiers) { + default IBaseResource buildResource(String theResourceType, Consumer... 
theModifiers) { IBaseResource resource = getFhirContext().getResourceDefinition(theResourceType).newInstance(); for (Consumer next : theModifiers) { next.accept(resource); diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 1dfb066f20b..0a140bc05ab 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 6aae3187f80..4db0f72591a 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index ecdbbd4a635..a3b7462547d 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 560106316d0..33d67b4e94a 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 68ae7002bdd..5f830ffce69 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 9ac87f4b79d..dbc5e9359b8 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index b180c3de905..aafc58b15ba 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 5e946ea683b..6daf487888f 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml @@ -58,37 +58,37 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu3 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-hl7org-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r4 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r5 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu3 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-r4 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT org.apache.velocity diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index b4a180ec8ca..6b9aa6546e9 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index d53c158c6e0..723188cb297 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ 
ca.uhn.hapi.fhir hapi-fhir pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. https://hapifhir.io @@ -761,20 +761,21 @@ 1.2.0 4.2.5 1.2 - 2.7.0 - 1.14 + 3.0.1 + 1.15 1.20 - 1.7 - 2.6 - 3.9 + 1.9 + 2.8.0 + 3.12.0 1.2 1.5.0 10.14.2.0 2.5.1 + 3.9.0 0.7.9 - 30.1-jre - 2.8.5 + 30.1.1-jre + 2.8.6 2.2.11_1 2.3.1 2.3.0.1 @@ -786,17 +787,17 @@ 3.0.2 5.7.0 6.5.4 - 5.4.26.Final - 6.0.0.Final + 5.4.30.Final + 6.0.2.Final 8.7.0 2.2 6.1.5.Final 4.4.13 4.5.13 - 2.12.1 - 2.11.3 - 3.1.0 + 2.12.3 + ${jackson_version} + 3.3.0 1.8 3.8.1 4.0.0.Beta3 @@ -807,15 +808,15 @@ 1.2_5 1.7.30 2.11.1 - 5.3.3 + 5.3.6 - 2.4.2 - 4.2.3.RELEASE + 2.4.7 + 4.3.2 2.4.1 1.2.2.RELEASE 3.1.4 - 3.0.11.RELEASE + 3.0.12.RELEASE 4.4.1 @@ -999,7 +1000,7 @@ org.jetbrains annotations - 19.0.0 + 20.1.0 commons-io @@ -1150,7 +1151,7 @@ org.apache.commons commons-dbcp2 - 2.7.0 + 2.8.0 org.apache.commons @@ -1312,7 +1313,7 @@ com.fasterxml.woodstox woodstox-core - 6.2.3 + 6.2.5 org.ebaysf.web @@ -1398,7 +1399,7 @@ org.fusesource.jansi jansi - 2.1.1 + 2.3.2 org.glassfish @@ -1553,12 +1554,12 @@ org.mockito mockito-core - 3.6.28 + ${mockito_version} org.mockito mockito-junit-jupiter - 3.3.3 + ${mockito_version} org.postgresql @@ -1817,18 +1818,10 @@ true - - com.gemnasium - gemnasium-maven-plugin - 0.2.0 - - github.com/hapifhir/hapi-fhir - - org.basepom.maven duplicate-finder-maven-plugin - 1.4.0 + 1.5.0 de.jpdigital @@ -1889,12 +1882,12 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.1.1 + 3.2.0 org.apache.maven.plugins maven-jar-plugin - 3.1.2 + 3.2.0 org.apache.maven.plugins @@ -1909,7 +1902,7 @@ org.apache.maven.plugins maven-plugin-plugin - 3.5 + 3.6.0 org.apache.maven.plugins @@ -1919,14 +1912,7 @@ org.apache.maven.plugins maven-source-plugin - 3.1.0 - - - org.codehaus.plexus - plexus-utils - 3.1.0 - - + 3.2.1 org.apache.maven.plugins @@ -1948,7 +1934,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.2.0 org.codehaus.mojo @@ 
-1981,7 +1967,7 @@ org.codehaus.mojo versions-maven-plugin - 2.7 + 2.8.1 false @@ -2110,7 +2096,7 @@ org.apache.maven.plugins maven-checkstyle-plugin - 3.1.0 + 3.1.2 com.puppycrawl.tools @@ -2143,7 +2129,7 @@ - 3.3.9 + 3.5.4 11 diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml index 37c4a874033..7640191ae97 100644 --- a/restful-server-example/pom.xml +++ b/restful-server-example/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 4e51e91c572..2ef7b674d4b 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 20a96ef3c9e..85242f389d3 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 897bcbb0b07..9716634f826 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml From b294f7d20837b6649eeb50959f4e38a6c2f27bfd Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Wed, 14 Apr 2021 18:00:52 -0400 Subject: [PATCH 43/61] License headers --- .../fhir/context/phonetic/NumericEncoder.java | 20 +++++++++++++++++++ .../mdm/rules/matcher/NumericMatcher.java | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java index 1619748d470..f293977b318 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.context.phonetic; +/*- + * #%L + * HAPI FHIR - Core Library + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import com.google.common.base.CharMatcher; // Useful for numerical identifiers like phone numbers, address parts etc. diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java index 82bce7d59c0..92a2c558e4b 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/NumericMatcher.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.mdm.rules.matcher; +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import ca.uhn.fhir.context.phonetic.NumericEncoder; // Useful for numerical identifiers like phone numbers, address parts etc. From 6b43410514622b923fc368409bae5aaea59ae110 Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Wed, 14 Apr 2021 18:50:47 -0400 Subject: [PATCH 44/61] Add missing constant --- .../src/main/java/ca/uhn/fhir/util/VersionEnum.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java index 8af93635c43..773cc30bd26 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java @@ -68,7 +68,9 @@ public enum VersionEnum { V5_2_0, V5_2_1, V5_3_0, - V5_4_0; + V5_3_2, + V5_4_0, + ; public static VersionEnum latestVersion() { VersionEnum[] values = VersionEnum.values(); From 120605ebababb956890dd1d9d46c24c2438b91de Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Wed, 14 Apr 2021 19:56:25 -0400 Subject: [PATCH 45/61] Add to task --- .../ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java | 1 + 1 file changed, 1 insertion(+) diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index e3975350931..13bb76eb723 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ 
b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -100,6 +100,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500); + blkImportJobTable.addColumn("JOB_DESC").nullable().type(ColumnTypeEnum.STRING, 500); blkImportJobTable.addColumn("OPTLOCK").nonNullable().type(ColumnTypeEnum.INT); blkImportJobTable.addColumn("FILE_COUNT").nonNullable().type(ColumnTypeEnum.INT); blkImportJobTable.addColumn("ROW_PROCESSING_MODE").nonNullable().type(ColumnTypeEnum.STRING, 20); From 9a67f3ee0cffe22ae718619ec187e3f23ba12393 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Apr 2021 20:09:37 -0400 Subject: [PATCH 46/61] Bump spring_boot_version from 2.4.1 to 2.4.4 (#2550) Bumps `spring_boot_version` from 2.4.1 to 2.4.4. 
Updates `spring-boot-starter-test` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Updates `spring-boot-test` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Updates `spring-boot-maven-plugin` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Updates `spring-boot-dependencies` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Updates `spring-boot-autoconfigure` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Updates `spring-boot-configuration-processor` from 2.4.1 to 2.4.4 - [Release notes](https://github.com/spring-projects/spring-boot/releases) - [Commits](https://github.com/spring-projects/spring-boot/compare/v2.4.1...v2.4.4) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 723188cb297..1a7e01bbe03 100644 --- a/pom.xml +++ b/pom.xml @@ -812,7 +812,7 @@ 2.4.7 4.3.2 - 2.4.1 + 2.4.4 1.2.2.RELEASE 3.1.4 From 05b1323638dac3d1e53657ffbeaff3cff28231b1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Apr 2021 08:21:33 -0400 Subject: [PATCH 47/61] Bump jarchivelib from 1.0.0 to 1.1.0 (#2553) Bumps [jarchivelib](https://github.com/thrau/jarchivelib) from 1.0.0 to 1.1.0. 
- [Release notes](https://github.com/thrau/jarchivelib/releases) - [Commits](https://github.com/thrau/jarchivelib/compare/v1.0.0...v1.1.0) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 1a7e01bbe03..d1f94242138 100644 --- a/pom.xml +++ b/pom.xml @@ -1393,7 +1393,7 @@ org.rauschig jarchivelib - 1.0.0 + 1.1.0 test From 42c89ccedd15bc8757bf4d3e72c0077f5c5936db Mon Sep 17 00:00:00 2001 From: jamesagnew Date: Thu, 15 Apr 2021 08:22:44 -0400 Subject: [PATCH 48/61] Changelog --- .../main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml index c973340bfd7..928093b5dd3 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml @@ -21,5 +21,6 @@
  • Commons DBCP2 (JPA): 2.7.0 -> 2.8.0
  • Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE
  • JAnsi (CLI): 2.1.1 -> 2.3.2
  • +
  • JArchivelib (CLI): 1.0.0 -> 1.1.0
  • " From d94611edf62d17b17608c1b6f6073c29634d4a41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Apr 2021 08:23:30 -0400 Subject: [PATCH 49/61] Bump junit_version from 5.7.0 to 5.7.1 (#2554) Bumps `junit_version` from 5.7.0 to 5.7.1. Updates `junit-jupiter` from 5.7.0 to 5.7.1 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.7.0...r5.7.1) Updates `junit-jupiter-api` from 5.7.0 to 5.7.1 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.7.0...r5.7.1) Updates `junit-jupiter-engine` from 5.7.0 to 5.7.1 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.7.0...r5.7.1) Updates `junit-jupiter-params` from 5.7.0 to 5.7.1 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.7.0...r5.7.1) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d1f94242138..a30317cecab 100644 --- a/pom.xml +++ b/pom.xml @@ -785,7 +785,7 @@ 9.4.39.v20210325 3.0.2 - 5.7.0 + 5.7.1 6.5.4 5.4.30.Final 6.0.2.Final From 7cabcbd772a604317a19e73d40c3cfe7c106ac42 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 11:12:40 -0400 Subject: [PATCH 50/61] Fix partition selection for system request details --- .../bulk/export/job/GroupBulkItemReader.java | 12 ++-- .../bulk/export/job/ResourceToFileWriter.java | 3 +- .../partition/RequestPartitionHelperSvc.java | 30 ++++++++- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 61 ++++++++++++++++--- .../uhn/fhir/jpa/model/util/JpaConstants.java | 5 ++ 5 files changed, 95 insertions(+), 16 deletions(-) diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java index 3a10fec2aae..acac73df135 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java @@ -29,6 +29,8 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; +import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; @@ -178,13 +180,13 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"] */ private List getMembers() { - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId)); + SystemRequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails); List evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class); return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList()); } - - /** * Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients. 
* if myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to attempt to also expand it into matched @@ -194,7 +196,9 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade */ private Set expandAllPatientPidsFromGroup() { Set expandedIds = new HashSet<>(); - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId)); + SystemRequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), new SystemRequestDetails()); Long pidOrNull = myIdHelperService.getPidOrNull(group); //Attempt to perform MDM Expansion of membership diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java index 8b4ebe7e86a..df362a79f19 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java @@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.BinaryUtil; @@ -100,7 +101,7 @@ public class ResourceToFileWriter implements ItemWriter> { IBaseBinary binary = BinaryUtil.newBinary(myFhirContext); binary.setContentType(Constants.CT_FHIR_NDJSON); binary.setContent(myOutputStream.toByteArray()); - DaoMethodOutcome outcome = myBinaryDao.create(binary); + DaoMethodOutcome outcome = myBinaryDao.create(binary, new SystemRequestDetails()); return 
outcome.getResource().getIdElement(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 37da051117a..94d360f1b31 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -44,6 +44,7 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; +import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooks; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooksAndReturnObject; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.hasHooks; @@ -101,6 +102,19 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { return RequestPartitionId.defaultPartition(); } + //Shortcircuit and write system calls out to default partition. + if (theRequest instanceof SystemRequestDetails) { + if (theRequest.getTenantId() != null) { + if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { + return RequestPartitionId.allPartitions(); + } else { + return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); + } + } else { + return RequestPartitionId.defaultPartition(); + } + } + // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { HookParams params = new HookParams() @@ -129,7 +143,21 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { if (myPartitionSettings.isPartitioningEnabled()) { - // Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE + //Shortcircuit and write system calls out to default partition. 
+ if (theRequest instanceof SystemRequestDetails) { + if (theRequest.getTenantId() != null) { + if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { + return RequestPartitionId.allPartitions(); + } else { + return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); + } + } else { + return RequestPartitionId.defaultPartition(); + } + } + + + // Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE HookParams params = new HookParams() .add(IBaseResource.class, theResource) .add(RequestDetails.class, theRequest) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index fdc92d090e9..902e5019e98 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -18,6 +18,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.MdmLink; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.parser.IParser; @@ -494,7 +495,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { Patient patient = new Patient(); patient.setId("PAT" + i); patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); - myPatientDao.update(patient).getId().toUnqualifiedVersionless(); + myPatientDao.update(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); } // Create a bulk job @@ -848,7 +849,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { public String getBinaryContents(IBulkDataExportSvc.JobInfo theJobInfo, int theIndex) { // Iterate over the files - Binary 
nextBinary = myBinaryDao.read(theJobInfo.getFiles().get(theIndex).getResourceId()); + Binary nextBinary = myBinaryDao.read(theJobInfo.getFiles().get(theIndex).getResourceId(), new SystemRequestDetails()); assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); ourLog.info("Next contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); @@ -928,7 +929,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { //Check Observation Content - Binary observationExportContent = myBinaryDao.read(jobInfo.getFiles().get(1).getResourceId()); + Binary observationExportContent = myBinaryDao.read(jobInfo.getFiles().get(1).getResourceId(), new SystemRequestDetails()); assertEquals(Constants.CT_FHIR_NDJSON, observationExportContent.getContentType()); nextContents = new String(observationExportContent.getContent(), Constants.CHARSET_UTF8); ourLog.info("Next contents for type {}:\n{}", observationExportContent.getResourceType(), nextContents); @@ -1061,7 +1062,47 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { //Now if we create another one and ask for the cache, we should get the most-recently-insert entry. 
IBulkDataExportSvc.JobInfo jobInfo10 = myBulkDataExportSvc.submitJob(options, true); assertThat(jobInfo10.getJobId(), is(equalTo(jobInfo9.getJobId()))); + } + @Test + public void testBulkExportWritesToDEFAULTPartitionWhenPartitioningIsEnabled() { + myPartitionSettings.setPartitioningEnabled(true); + createResources(); + + //Only get COVID-19 vaccinations + Set filters = new HashSet<>(); + filters.add("Immunization?vaccine-code=vaccines|COVID-19"); + + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Immunization")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(filters); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); + + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getFiles().size(), equalTo(1)); + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + + // Check immunization Content + String nextContents = getBinaryContents(jobInfo, 0); + + assertThat(nextContents, is(containsString("IMM1"))); + assertThat(nextContents, is(containsString("IMM3"))); + assertThat(nextContents, is(containsString("IMM5"))); + assertThat(nextContents, is(containsString("IMM7"))); + assertThat(nextContents, is(containsString("IMM9"))); + assertThat(nextContents, is(containsString("IMM999"))); + + assertThat(nextContents, is(not(containsString("Flu")))); } private void createResources() { @@ -1071,7 +1112,7 @@ public class 
BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { //Manually create a golden record Patient goldenPatient = new Patient(); goldenPatient.setId("PAT999"); - DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient); + DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, new SystemRequestDetails()); Long goldenPid = myIdHelperService.getPidOrNull(g1Outcome.getResource()); //Create our golden records' data. @@ -1098,12 +1139,12 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { createCareTeamWithIndex(i, patId); } - myPatientGroupId = myGroupDao.update(group).getId(); + myPatientGroupId = myGroupDao.update(group, new SystemRequestDetails()).getId(); //Manually create another golden record Patient goldenPatient2 = new Patient(); goldenPatient2.setId("PAT888"); - DaoMethodOutcome g2Outcome = myPatientDao.update(goldenPatient2); + DaoMethodOutcome g2Outcome = myPatientDao.update(goldenPatient2, new SystemRequestDetails()); Long goldenPid2 = myIdHelperService.getPidOrNull(g2Outcome.getResource()); //Create some nongroup patients MDM linked to a different golden resource. They shouldnt be included in the query. @@ -1132,14 +1173,14 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { patient.setGender(i % 2 == 0 ? 
Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE); patient.addName().setFamily("FAM" + i); patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); - return myPatientDao.update(patient); + return myPatientDao.update(patient, new SystemRequestDetails()); } private void createCareTeamWithIndex(int i, IIdType patId) { CareTeam careTeam = new CareTeam(); careTeam.setId("CT" + i); careTeam.setSubject(new Reference(patId)); // This maps to the "patient" search parameter on CareTeam - myCareTeamDao.update(careTeam); + myCareTeamDao.update(careTeam, new SystemRequestDetails()); } private void createImmunizationWithIndex(int i, IIdType patId) { @@ -1157,7 +1198,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { cc.addCoding().setSystem("vaccines").setCode("COVID-19"); immunization.setVaccineCode(cc); } - myImmunizationDao.update(immunization); + myImmunizationDao.update(immunization, new SystemRequestDetails()); } private void createObservationWithIndex(int i, IIdType patId) { @@ -1168,7 +1209,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { if (patId != null) { obs.getSubject().setReference(patId.getValue()); } - myObservationDao.update(obs); + myObservationDao.update(obs, new SystemRequestDetails()); } public void linkToGoldenResource(Long theGoldenPid, Long theSourcePid) { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index feb1fbf5bd2..2d7c10fa796 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -218,6 +218,11 @@ public class JpaConstants { */ public static final String DEFAULT_PARTITION_NAME = "DEFAULT"; + /** + * The name of the collection of all partitions + */ + public static final String 
ALL_PARTITIONS_NAME = "ALL_PARTITIONS"; + /** * Parameter for the $expand operation */ From efe5b7b14055c7c6335492508d5d1f8ff64fdc3c Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 11:24:42 -0400 Subject: [PATCH 51/61] Fix package cache usage --- .../ca/uhn/fhir/jpa/packages/JpaPackageCache.java | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java index 6c64b331a17..76f621729cd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java @@ -65,7 +65,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.support.TransactionTemplate; @@ -655,16 +654,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac } private void deleteAndExpungeResourceBinary(IIdType theResourceBinaryId, ExpungeOptions theOptions) { - - if (myPartitionSettings.isPartitioningEnabled()) { - SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME); - getBinaryDao().delete(theResourceBinaryId, requestDetails).getEntity(); - getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, requestDetails); - } else { - getBinaryDao().delete(theResourceBinaryId).getEntity(); - getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, null); - } + getBinaryDao().delete(theResourceBinaryId, new 
SystemRequestDetails()).getEntity(); + getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, new SystemRequestDetails()); } From 8035d51e48ad4fdde97c8315a037d9dc1912d261 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 11:31:43 -0400 Subject: [PATCH 52/61] Refactor, comment --- .../partition/RequestPartitionHelperSvc.java | 44 +++++++++++-------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 94d360f1b31..ceecbb9458d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -35,6 +35,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import javax.annotation.Nonnull; @@ -104,15 +105,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { //Shortcircuit and write system calls out to default partition. 
if (theRequest instanceof SystemRequestDetails) { - if (theRequest.getTenantId() != null) { - if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { - return RequestPartitionId.allPartitions(); - } else { - return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); - } - } else { - return RequestPartitionId.defaultPartition(); - } + return getSystemRequestPartitionId(theRequest); } // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ @@ -133,6 +126,29 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { return RequestPartitionId.allPartitions(); } + /** + * Determine the partition for a System Call (defined by the fact that the request is of type SystemRequestDetails) + * + * 1. If the tenant ID is set to the constant for all partitions, return all partitions + * 2. If there is a tenant ID set in the request, use it. + * 3. Otherwise, return the Default Partition. + * + * @param theRequest The {@link SystemRequestDetails} + * @return the {@link RequestPartitionId} to be used for this request. + */ + @NotNull + private RequestPartitionId getSystemRequestPartitionId(@NotNull RequestDetails theRequest) { + if (theRequest.getTenantId() != null) { + if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { + return RequestPartitionId.allPartitions(); + } else { + return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); + } + } else { + return RequestPartitionId.defaultPartition(); + } + } + /** * Invoke the {@link Pointcut#STORAGE_PARTITION_IDENTIFY_CREATE} interceptor pointcut to determine the tenant for a create request. */ @@ -145,15 +161,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { //Shortcircuit and write system calls out to default partition. 
if (theRequest instanceof SystemRequestDetails) { - if (theRequest.getTenantId() != null) { - if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { - return RequestPartitionId.allPartitions(); - } else { - return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); - } - } else { - return RequestPartitionId.defaultPartition(); - } + return getSystemRequestPartitionId(theRequest); } From fad32aa636208152d641211422c921b6e448a185 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 11:55:12 -0400 Subject: [PATCH 53/61] Partition management for expired jobs --- .../fhir/jpa/bulk/export/job/GroupBulkItemReader.java | 11 +++++++---- .../jpa/bulk/export/svc/BulkDataExportSvcImpl.java | 5 +++-- .../fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java | 7 ++++--- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java index acac73df135..ca6e3cbe699 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; -import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.QueryChunker; @@ -56,6 +55,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; + /** * Bulk Item reader for the Group Bulk Export job. 
* Instead of performing a normal query on the resource type using type filters, we instead @@ -120,7 +121,9 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade Set patientPidsToExport = new HashSet<>(pidsOrThrowException); if (myMdmEnabled) { - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId)); + SystemRequestDetails srd = new SystemRequestDetails(); + srd.setTenantId(ALL_PARTITIONS_NAME); + IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd); Long pidOrNull = myIdHelperService.getPidOrNull(group); List goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); goldenPidSourcePidTuple.forEach(tuple -> { @@ -181,7 +184,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade */ private List getMembers() { SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + requestDetails.setTenantId(ALL_PARTITIONS_NAME); IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails); List evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class); return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList()); @@ -197,7 +200,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade private Set expandAllPatientPidsFromGroup() { Set expandedIds = new HashSet<>(); SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + requestDetails.setTenantId(ALL_PARTITIONS_NAME); IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), new SystemRequestDetails()); Long pidOrNull = myIdHelperService.getPidOrNull(group); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java index 872c036f63a..1dc8d672af7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java @@ -42,6 +42,7 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; @@ -203,8 +204,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) { ourLog.info("Purging bulk data file: {}", nextFile.getResourceId()); - getBinaryDao().delete(toId(nextFile.getResourceId())); - getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null); + getBinaryDao().delete(toId(nextFile.getResourceId()), new SystemRequestDetails()); + getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails()); myBulkExportCollectionFileDao.deleteByPid(nextFile.getId()); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 902e5019e98..432bab10282 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -117,7 +117,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { Binary b = new Binary(); b.setContent(new byte[]{0, 1, 2, 3}); - String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue(); + String binaryId = myBinaryDao.create(b, new SystemRequestDetails()).getId().toUnqualifiedVersionless().getValue(); BulkExportJobEntity job = new BulkExportJobEntity(); job.setStatus(BulkExportJobStatusEnum.COMPLETE); @@ -524,7 +524,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { // Iterate over the files for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { - Binary nextBinary = myBinaryDao.read(next.getResourceId()); + Binary nextBinary = myBinaryDao.read(next.getResourceId(), new SystemRequestDetails()); assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents); @@ -1030,7 +1030,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { } @Test - public void testCacheSettingIsRespectedWhenCreatingNewJobs() { + public void testCacheSettingIsRespectedWhenCreatingNewJobs() throws InterruptedException { BulkDataExportOptions options = new BulkDataExportOptions(); options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); options.setResourceTypes(Sets.newHashSet("Procedure")); @@ -1049,6 +1049,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options, false); IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options, false); IBulkDataExportSvc.JobInfo jobInfo8 = 
myBulkDataExportSvc.submitJob(options, false); + Thread.sleep(100L); //stupid commit timings. IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options, false); //First non-cached should retrieve new ID. From f91a4f9576cc6fcc3350bcf6c79663ac355e557c Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 14:18:01 -0400 Subject: [PATCH 54/61] wip tidy implementaion --- .../jpa/packages/PackageInstallerSvcImpl.java | 2 +- .../partition/RequestPartitionHelperSvc.java | 30 ++++++++----------- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 6 +++- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java index beb787dc32e..7c044f667db 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java @@ -347,7 +347,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) { if (myPartitionSettings.isPartitioningEnabled()) { SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME); +// requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME); return theDao.search(theMap, requestDetails); } else { return theDao.search(theMap); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index ceecbb9458d..05707da2e10 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -103,10 +103,6 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { return RequestPartitionId.defaultPartition(); } - //Shortcircuit and write system calls out to default partition. - if (theRequest instanceof SystemRequestDetails) { - return getSystemRequestPartitionId(theRequest); - } // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { @@ -118,6 +114,10 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { requestPartitionId = null; } + if (theRequest instanceof SystemRequestDetails) { + requestPartitionId = getSystemRequestPartitionId(theRequest); + } + validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ); return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest); @@ -159,23 +159,19 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { if (myPartitionSettings.isPartitioningEnabled()) { - //Shortcircuit and write system calls out to default partition. 
- if (theRequest instanceof SystemRequestDetails) { - return getSystemRequestPartitionId(theRequest); - } - - - // Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE - HookParams params = new HookParams() - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params); // Handle system requests boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType); - if (nonPartitionableResource && requestPartitionId == null) { + if (nonPartitionableResource) { requestPartitionId = RequestPartitionId.defaultPartition(); + } else if(theRequest instanceof SystemRequestDetails) { + requestPartitionId = getSystemRequestPartitionId(theRequest); + } else { + HookParams params = new HookParams()// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE + .add(IBaseResource.class, theResource) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params); } String resourceName = myFhirContext.getResourceType(theResource); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 432bab10282..ede4c871584 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -18,6 +18,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import 
ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.MdmLink; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; @@ -1104,6 +1105,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { assertThat(nextContents, is(containsString("IMM999"))); assertThat(nextContents, is(not(containsString("Flu")))); + myPartitionSettings.setPartitioningEnabled(false); } private void createResources() { @@ -1113,7 +1115,9 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { //Manually create a golden record Patient goldenPatient = new Patient(); goldenPatient.setId("PAT999"); - DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, new SystemRequestDetails()); + SystemRequestDetails srd = new SystemRequestDetails(); + srd.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, srd); Long goldenPid = myIdHelperService.getPidOrNull(g1Outcome.getResource()); //Create our golden records' data. 
From 3075a9b5e6c7cc537db77608ef3717149b81f92b Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 16 Apr 2021 16:53:23 -0400 Subject: [PATCH 55/61] Still minor refactoring --- ...revent-bulk-failure-while-partitioned.yaml | 4 ++ .../partition/RequestPartitionHelperSvc.java | 55 ++++++++++++++----- .../jpa/dao/r4/PartitioningSqlR4Test.java | 1 - 3 files changed, 44 insertions(+), 16 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2556-prevent-bulk-failure-while-partitioned.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2556-prevent-bulk-failure-while-partitioned.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2556-prevent-bulk-failure-while-partitioned.yaml new file mode 100644 index 00000000000..facbd901d31 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2556-prevent-bulk-failure-while-partitioned.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 2556 +title: "Fixed a bug which would cause Bulk Export to fail when run in a partitioned environment." 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 05707da2e10..7e4bf4434eb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -36,6 +36,7 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import javax.annotation.Nonnull; @@ -49,8 +50,11 @@ import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooks; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooksAndReturnObject; import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.hasHooks; +import static org.slf4j.LoggerFactory.getLogger; public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { + private static final Logger ourLog = getLogger(RequestPartitionHelperSvc.class); + private final HashSet myNonPartitionableResourceNames; @@ -97,15 +101,18 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType) { RequestPartitionId requestPartitionId; + boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType); if (myPartitionSettings.isPartitioningEnabled()) { // Handle system requests - if ((theRequest == null && myNonPartitionableResourceNames.contains(theResourceType))) { + //TODO GGG eventually, theRequest will not be allowed to be null here, and we will 
pass through SystemRequestDetails instead. + if (theRequest == null && nonPartitionableResource) { return RequestPartitionId.defaultPartition(); } - - // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ - if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { + if (theRequest instanceof SystemRequestDetails) { + requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource); + // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ + } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { HookParams params = new HookParams() .add(RequestDetails.class, theRequest) .addIfMatchesType(ServletRequestDetails.class, theRequest); @@ -114,10 +121,6 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { requestPartitionId = null; } - if (theRequest instanceof SystemRequestDetails) { - requestPartitionId = getSystemRequestPartitionId(theRequest); - } - validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ); return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest); @@ -126,6 +129,26 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { return RequestPartitionId.allPartitions(); } + /** + * + * For system requests, read partition from tenant ID if present, otherwise set to DEFAULT. If the resource they are attempting to partition + * is non-partitionable scream in the logs and set the partition to DEFAULT. 
+ * + * @param theRequest + * @param theNonPartitionableResource + * @return + */ + @NotNull + private RequestPartitionId getSystemRequestPartitionId(@NotNull RequestDetails theRequest, boolean theNonPartitionableResource) { + RequestPartitionId requestPartitionId; + requestPartitionId = getSystemRequestPartitionId(theRequest); + if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) { + ourLog.warn("System call is attempting to write a non-partitionable resource to a partition! This is a bug in your code! Setting partition to DEFAULT"); + requestPartitionId = RequestPartitionId.defaultPartition(); + } + return requestPartitionId; + } + /** * Determine the partition for a System Call (defined by the fact that the request is of type SystemRequestDetails) * @@ -158,20 +181,22 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { RequestPartitionId requestPartitionId; if (myPartitionSettings.isPartitioningEnabled()) { - - - // Handle system requests boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType); - if (nonPartitionableResource) { - requestPartitionId = RequestPartitionId.defaultPartition(); - } else if(theRequest instanceof SystemRequestDetails) { - requestPartitionId = getSystemRequestPartitionId(theRequest); + + if (theRequest instanceof SystemRequestDetails) { + requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource); } else { + //This is an external Request (e.g. ServletRequestDetails) so we want to figure out the partition via interceptor. 
HookParams params = new HookParams()// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE .add(IBaseResource.class, theResource) .add(RequestDetails.class, theRequest) .addIfMatchesType(ServletRequestDetails.class, theRequest); requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params); + + //If the interceptors haven't selected a partition, and its a non-partitionable resource anyhow, send to DEFAULT + if (nonPartitionableResource && requestPartitionId == null) { + requestPartitionId = RequestPartitionId.defaultPartition(); + } } String resourceName = myFhirContext.getResourceType(theResource); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index cb5691a51be..c8814ae0e4a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -642,7 +642,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(myPartitionId, resourceTable.getPartitionId().getPartitionId().intValue()); assertEquals(myPartitionDate, resourceTable.getPartitionId().getPartitionDate()); }); - } @Test From 77e2768a14d3e8f3396480439ee43c6f77f438b6 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Sun, 18 Apr 2021 17:36:55 -0400 Subject: [PATCH 56/61] Address code review comments. 
Create new static builder for all partitions SRD --- .../jpa/bulk/export/job/GroupBulkItemReader.java | 13 ++++--------- .../jpa/partition/RequestPartitionHelperSvc.java | 3 +-- .../fhir/jpa/partition/SystemRequestDetails.java | 11 +++++++---- .../fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java | 4 +--- 4 files changed, 13 insertions(+), 18 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java index ca6e3cbe699..6b49a2134b7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java @@ -55,8 +55,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; - /** * Bulk Item reader for the Group Bulk Export job. * Instead of performing a normal query on the resource type using type filters, we instead @@ -121,8 +119,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade Set patientPidsToExport = new HashSet<>(pidsOrThrowException); if (myMdmEnabled) { - SystemRequestDetails srd = new SystemRequestDetails(); - srd.setTenantId(ALL_PARTITIONS_NAME); + SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions(); IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd); Long pidOrNull = myIdHelperService.getPidOrNull(group); List goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); @@ -183,8 +180,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade * @return A list of strings representing the Patient IDs of the members (e.g. 
["P1", "P2", "P3"] */ private List getMembers() { - SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(ALL_PARTITIONS_NAME); + SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions(); IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails); List evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class); return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList()); @@ -199,9 +195,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade */ private Set expandAllPatientPidsFromGroup() { Set expandedIds = new HashSet<>(); - SystemRequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setTenantId(ALL_PARTITIONS_NAME); - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), new SystemRequestDetails()); + SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions(); + IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails); Long pidOrNull = myIdHelperService.getPidOrNull(group); //Attempt to perform MDM Expansion of membership diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 7e4bf4434eb..4bcf6b1ddb5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -143,8 +143,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { RequestPartitionId requestPartitionId; requestPartitionId = getSystemRequestPartitionId(theRequest); if (theNonPartitionableResource && 
!requestPartitionId.isDefaultPartition()) { - ourLog.warn("System call is attempting to write a non-partitionable resource to a partition! This is a bug in your code! Setting partition to DEFAULT"); - requestPartitionId = RequestPartitionId.defaultPartition(); + throw new InternalErrorException("System call is attempting to write a non-partitionable resource to a partition! This is a bug!") } return requestPartitionId; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java index c2b3361eb0c..f194a1d8f73 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java @@ -35,17 +35,15 @@ import ca.uhn.fhir.rest.server.IRestfulServerDefaults; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableListMultimap; -import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ListMultimap; -import com.google.common.collect.Multimap; -import com.google.common.collect.Multimaps; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.nio.charset.Charset; import java.util.List; -import java.util.Optional; + +import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; /** * A default RequestDetails implementation that can be used for system calls to @@ -104,6 +102,11 @@ public class SystemRequestDetails extends RequestDetails { } myHeaders.put(theName, theValue); } + public static SystemRequestDetails newSystemRequestAllPartitions() { + SystemRequestDetails systemRequestDetails = new SystemRequestDetails(); + systemRequestDetails.setTenantId(ALL_PARTITIONS_NAME); + return systemRequestDetails; + } @Override diff --git 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index ede4c871584..701de70bb04 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -18,7 +18,6 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.MdmLink; -import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; @@ -1115,8 +1114,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { //Manually create a golden record Patient goldenPatient = new Patient(); goldenPatient.setId("PAT999"); - SystemRequestDetails srd = new SystemRequestDetails(); - srd.setTenantId(JpaConstants.ALL_PARTITIONS_NAME); + SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions(); DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, srd); Long goldenPid = myIdHelperService.getPidOrNull(g1Outcome.getResource()); From c54862e77293334f3b8f970eeda8487265a0bb31 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Sun, 18 Apr 2021 18:03:37 -0400 Subject: [PATCH 57/61] Coding around at the speed of sound occasionally causes compilation failures --- .../ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 4bcf6b1ddb5..ce725f71515 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -143,7 +143,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { RequestPartitionId requestPartitionId; requestPartitionId = getSystemRequestPartitionId(theRequest); if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) { - throw new InternalErrorException("System call is attempting to write a non-partitionable resource to a partition! This is a bug!") + throw new InternalErrorException("System call is attempting to write a non-partitionable resource to a partition! This is a bug!"); } return requestPartitionId; } From 89e56ecb98fa84e3969471ea178085f890564c1f Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 19 Apr 2021 09:14:12 -0400 Subject: [PATCH 58/61] Remove jetbrains annotation --- .../ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index ce725f71515..0daeaf201cd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -35,7 +35,6 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; @@ -138,8 +137,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { * 
@param theNonPartitionableResource * @return */ - @NotNull - private RequestPartitionId getSystemRequestPartitionId(@NotNull RequestDetails theRequest, boolean theNonPartitionableResource) { + private RequestPartitionId getSystemRequestPartitionId(RequestDetails theRequest, boolean theNonPartitionableResource) { RequestPartitionId requestPartitionId; requestPartitionId = getSystemRequestPartitionId(theRequest); if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) { From 52d161c3376e28e65b29a5aff7e548662eec82e8 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 19 Apr 2021 09:16:35 -0400 Subject: [PATCH 59/61] Add checkstyle to prevent future jetbrains annotations from sneaking in --- src/checkstyle/checkstyle.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/checkstyle/checkstyle.xml b/src/checkstyle/checkstyle.xml index 1c8800c913c..15d7b80d7a0 100644 --- a/src/checkstyle/checkstyle.xml +++ b/src/checkstyle/checkstyle.xml @@ -30,6 +30,16 @@ + + + + + + + + + + From 014fa052714a118f4004ca1e1b123e24ff4896f0 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 19 Apr 2021 09:17:35 -0400 Subject: [PATCH 60/61] Fix a present annotation --- .../java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java index 89a2dfbbfbf..c21285ab398 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java @@ -37,12 +37,12 @@ import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.SearchParameter; import org.hl7.fhir.r4.model.StringType; -import org.jetbrains.annotations.Nullable; import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; From 2ed4b01eb8f0dee52d7b53cb41aae5505ac3f3d6 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Mon, 19 Apr 2021 10:31:16 -0400 Subject: [PATCH 61/61] Remove notnull --- .../ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 0daeaf201cd..326111da44f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -156,8 +156,8 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc { * @param theRequest The {@link SystemRequestDetails} * @return the {@link RequestPartitionId} to be used for this request. */ - @NotNull - private RequestPartitionId getSystemRequestPartitionId(@NotNull RequestDetails theRequest) { + @Nonnull + private RequestPartitionId getSystemRequestPartitionId(@Nonnull RequestDetails theRequest) { if (theRequest.getTenantId() != null) { if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) { return RequestPartitionId.allPartitions();