diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2271-fix-load-ig-package-when-partitioning-enabled.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2271-fix-load-ig-package-when-partitioning-enabled.yaml
new file mode 100644
index 00000000000..9fcab05ada8
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2271-fix-load-ig-package-when-partitioning-enabled.yaml
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2271
+title: "Attempts to load IG packs when partitioning was enabled resulted in NullPointerExceptions.
+  This has now been fixed, and IG packs and conformance resources will be loaded to the DEFAULT partition."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/partitioning.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/partitioning.md
index 1dc74cdbad7..63782c0d81c 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/partitioning.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/partitioning.md
@@ -139,3 +139,5 @@ None of the limitations listed here are considered permanent. Over time the HAPI
 * **Cross-partition History Operations are not supported**: It is not possible to perform a `_history` operation that spans all partitions (`_history` does work when applied to a single partition however).
 
 * **Bulk Operations are not partition aware**: Bulk export operations will export data across all partitions.
+
+* **Package Operations are not partition aware**: Package operations will only create, update and query resources in the default partition.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
index caa425deaec..01b22acbf6d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
@@ -29,10 +29,12 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageDao;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionResourceDao;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageEntity;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.EncodingEnum;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
@@ -54,6 +56,7 @@ import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
 import org.hl7.fhir.exceptions.FHIRException;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
 import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.utilities.npm.BasePackageCacheManager;
 import org.hl7.fhir.utilities.npm.NpmPackage;
@@ -115,6 +118,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 	private FhirContext myCtx;
 	@Autowired
 	private PlatformTransactionManager myTxManager;
+	@Autowired
+	private PartitionSettings myPartitionSettings;
 
 	@Override
 	public NpmPackage loadPackageFromCacheOnly(String theId, @Nullable String theVersion) {
@@ -205,7 +210,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 
 		return newTxTemplate().execute(tx -> {
 
-			ResourceTable persistedPackage = (ResourceTable) getBinaryDao().create(binary).getEntity();
+			ResourceTable persistedPackage = createResourceBinary(binary);
 			NpmPackageEntity pkg = myPackageDao.findByPackageId(thePackageId).orElseGet(() -> createPackage(npmPackage));
 			NpmPackageVersionEntity packageVersion = myPackageVersionDao.findByPackageIdAndVersion(thePackageId, packageVersionId).orElse(null);
 			if (packageVersion != null) {
@@ -282,7 +287,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 				byte[] minimizedContents = packageContext.newJsonParser().encodeResourceToString(resource).getBytes(StandardCharsets.UTF_8);
 
 				IBaseBinary resourceBinary = createPackageResourceBinary(nextFile, minimizedContents, contentType);
-				ResourceTable persistedResource = (ResourceTable) getBinaryDao().create(resourceBinary).getEntity();
+				ResourceTable persistedResource = createResourceBinary(resourceBinary);
 
 				NpmPackageVersionResourceEntity resourceEntity = new NpmPackageVersionResourceEntity();
 				resourceEntity.setPackageVersion(packageVersion);
@@ -319,6 +324,16 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 	}
 
+	private ResourceTable createResourceBinary(IBaseBinary theResourceBinary) {
+
+		if (myPartitionSettings.isPartitioningEnabled()) {
+			SystemRequestDetails myRequestDetails = new SystemRequestDetails();
+			return (ResourceTable) getBinaryDao().create(theResourceBinary, myRequestDetails).getEntity();
+		} else {
+			return (ResourceTable) getBinaryDao().create(theResourceBinary).getEntity();
+		}
+	}
+
 	private boolean updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(String thePackageId, String thePackageVersion) {
 		Collection<NpmPackageVersionEntity> existingVersions = myPackageVersionDao.findByPackageId(thePackageId);
 		boolean retVal = true;
 
@@ -578,16 +593,14 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 
 				ExpungeOptions options = new ExpungeOptions();
 				options.setExpungeDeletedResources(true).setExpungeOldVersions(true);
-				getBinaryDao().delete(next.getResourceBinary().getIdDt().toVersionless());
-				getBinaryDao().forceExpungeInExistingTransaction(next.getResourceBinary().getIdDt().toVersionless(), options, null);
+				deleteAndExpungeResourceBinary(next.getResourceBinary().getIdDt().toVersionless(), options);
 			}
 
 			myPackageVersionDao.delete(packageVersion.get());
 
 			ExpungeOptions options = new ExpungeOptions();
 			options.setExpungeDeletedResources(true).setExpungeOldVersions(true);
-			getBinaryDao().delete(packageVersion.get().getPackageBinary().getIdDt().toVersionless());
-			getBinaryDao().forceExpungeInExistingTransaction(packageVersion.get().getPackageBinary().getIdDt().toVersionless(), options, null);
+			deleteAndExpungeResourceBinary(packageVersion.get().getPackageBinary().getIdDt().toVersionless(), options);
 
 			Collection<NpmPackageVersionEntity> remainingVersions = myPackageVersionDao.findByPackageId(thePackageId);
 			if (remainingVersions.size() == 0) {
@@ -622,6 +635,19 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 		return retVal;
 	}
 
+	private void deleteAndExpungeResourceBinary(IIdType theResourceBinaryId, ExpungeOptions theOptions) {
+
+		if (myPartitionSettings.isPartitioningEnabled()) {
+			SystemRequestDetails myRequestDetails = new SystemRequestDetails();
+			getBinaryDao().delete(theResourceBinaryId, myRequestDetails).getEntity();
+			getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, myRequestDetails);
+		} else {
+			getBinaryDao().delete(theResourceBinaryId).getEntity();
+			getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, null);
+		}
+	}
+
+
 	@Nonnull
 	public List<Predicate> createSearchPredicates(PackageSearchSpec thePackageSearchSpec, CriteriaBuilder theCb, Root<NpmPackageVersionEntity> theRoot) {
 		List<Predicate> predicates = new ArrayList<>();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
index 8c5b1e0c784..2ddcb53e9c0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
@@ -31,7 +31,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
@@ -50,12 +52,12 @@ import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.utilities.npm.IPackageCacheManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.transaction.support.TransactionTemplate;
-import org.hl7.fhir.utilities.npm.BasePackageCacheManager;
 import org.hl7.fhir.utilities.npm.NpmPackage;
 
 import javax.annotation.PostConstruct;
@@ -102,7 +104,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 	private INpmPackageVersionDao myPackageVersionDao;
 	@Autowired
 	private ISearchParamRegistry mySearchParamRegistry;
-
+	@Autowired
+	private PartitionSettings myPartitionSettings;
 	/**
 	 * Constructor
 	 */
@@ -316,19 +319,19 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 	private void create(IBaseResource theResource, PackageInstallOutcomeJson theOutcome) {
 		IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass());
 		SearchParameterMap map = createSearchParameterMapFor(theResource);
-		IBundleProvider searchResult = dao.search(map);
+		IBundleProvider searchResult = searchResource(dao, map);
 		if (validForUpload(theResource)) {
 			if (searchResult.isEmpty()) {
 
 				ourLog.info("Creating new resource matching {}", map.toNormalizedQueryString(myFhirContext));
 				theOutcome.incrementResourcesInstalled(myFhirContext.getResourceType(theResource));
-				dao.create(theResource);
+				createResource(dao, theResource);
 
 			} else {
 
 				ourLog.info("Updating existing resource matching {}", map.toNormalizedQueryString(myFhirContext));
 				theResource.setId(searchResult.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());
-				DaoMethodOutcome outcome = dao.update(theResource);
+				DaoMethodOutcome outcome = updateResource(dao, theResource);
 				if (!outcome.isNop()) {
 					theOutcome.incrementResourcesInstalled(myFhirContext.getResourceType(theResource));
 				}
@@ -337,6 +340,33 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		}
 	}
 
+	private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
+		if (myPartitionSettings.isPartitioningEnabled()) {
+			SystemRequestDetails myRequestDetails = new SystemRequestDetails();
+			return theDao.search(theMap, myRequestDetails);
+		} else {
+			return theDao.search(theMap);
+		}
+	}
+
+	private void createResource(IFhirResourceDao theDao, IBaseResource theResource) {
+		if (myPartitionSettings.isPartitioningEnabled()) {
+			SystemRequestDetails myRequestDetails = new SystemRequestDetails();
+			theDao.create(theResource, myRequestDetails);
+		} else {
+			theDao.create(theResource);
+		}
+	}
+
+	private DaoMethodOutcome updateResource(IFhirResourceDao theDao, IBaseResource theResource) {
+		if (myPartitionSettings.isPartitioningEnabled()) {
+			SystemRequestDetails myRequestDetails = new SystemRequestDetails();
+			return theDao.update(theResource, myRequestDetails);
+		} else {
+			return theDao.update(theResource);
+		}
+	}
+
 	boolean validForUpload(IBaseResource theResource) {
 		String resourceType = myFhirContext.getResourceType(theResource);
 		if ("SearchParameter".equals(resourceType)) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
index bb4899fb394..703fb3f6369 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
@@ -68,14 +68,20 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
 		myPartitioningBlacklist.add("Subscription");
 		myPartitioningBlacklist.add("SearchParameter");
 
-		// Validation
+		// Validation and Conformance
 		myPartitioningBlacklist.add("StructureDefinition");
 		myPartitioningBlacklist.add("Questionnaire");
+		myPartitioningBlacklist.add("CapabilityStatement");
+		myPartitioningBlacklist.add("CompartmentDefinition");
+		myPartitioningBlacklist.add("OperationDefinition");
 
 		// Terminology
 		myPartitioningBlacklist.add("ConceptMap");
 		myPartitioningBlacklist.add("CodeSystem");
 		myPartitioningBlacklist.add("ValueSet");
+		myPartitioningBlacklist.add("NamingSystem");
+		myPartitioningBlacklist.add("StructureMap");
+
 	}
 
 	/**
@@ -91,7 +97,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
 		if (myPartitionSettings.isPartitioningEnabled()) {
 
 			// Handle system requests
-			if (theRequest == null && myPartitioningBlacklist.contains(theResourceType)) {
+			if ((theRequest == null && myPartitioningBlacklist.contains(theResourceType)) || theRequest instanceof SystemRequestDetails) {
 				return RequestPartitionId.defaultPartition();
 			}
 
@@ -123,7 +129,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
 		if (myPartitionSettings.isPartitioningEnabled()) {
 
 			// Handle system requests
-			if (theRequest == null && myPartitioningBlacklist.contains(theResourceType)) {
+			if ((theRequest == null && myPartitioningBlacklist.contains(theResourceType)) || theRequest instanceof SystemRequestDetails) {
 				return RequestPartitionId.defaultPartition();
 			}
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java
new file mode 100644
index 00000000000..7158cfab12c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/SystemRequestDetails.java
@@ -0,0 +1,159 @@
+package ca.uhn.fhir.jpa.partition;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.api.AddProfileTagEnum;
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.IInterceptorService;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.rest.api.EncodingEnum;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.ETagSupportEnum;
+import ca.uhn.fhir.rest.server.ElementsSupportEnum;
+import ca.uhn.fhir.rest.server.IPagingProvider;
+import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
+import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.nio.charset.Charset;
+import java.util.List;
+
+/**
+ * A default RequestDetails implementation that can be used for system calls to
+ * Resource DAO methods when partitioning is enabled. Using a SystemRequestDetails
+ * instance for system calls will ensure that any resource queries or updates will
+ * use the DEFAULT partition when partitioning is enabled.
+ */
+public class SystemRequestDetails extends RequestDetails {
+	public SystemRequestDetails() {
+		super(new MyInterceptorBroadcaster());
+	}
+
+	public SystemRequestDetails(IInterceptorBroadcaster theInterceptorBroadcaster) {
+		super(theInterceptorBroadcaster);
+	}
+
+	@Override
+	protected byte[] getByteStreamRequestContents() {
+		return new byte[0];
+	}
+
+	@Override
+	public Charset getCharset() {
+		return null;
+	}
+
+	@Override
+	public FhirContext getFhirContext() {
+		return null;
+	}
+
+	@Override
+	public String getHeader(String name) {
+		return null;
+	}
+
+	@Override
+	public List<String> getHeaders(String name) {
+		return null;
+	}
+
+	@Override
+	public Object getAttribute(String theAttributeName) {
+		return null;
+	}
+
+	@Override
+	public void setAttribute(String theAttributeName, Object theAttributeValue) {
+
+	}
+
+	@Override
+	public InputStream getInputStream() throws IOException {
+		return null;
+	}
+
+	@Override
+	public Reader getReader() throws IOException {
+		return null;
+	}
+
+	@Override
+	public IRestfulServerDefaults getServer() {
+		return new MyRestfulServerDefaults();
+	}
+
+	@Override
+	public String getServerBaseForRequest() {
+		return null;
+	}
+
+	private static class MyRestfulServerDefaults implements IRestfulServerDefaults {
+
+		@Override
+		public AddProfileTagEnum getAddProfileTag() {
+			return null;
+		}
+
+		@Override
+		public EncodingEnum getDefaultResponseEncoding() {
+			return null;
+		}
+
+		@Override
+		public ETagSupportEnum getETagSupport() {
+			return null;
+		}
+
+		@Override
+		public ElementsSupportEnum getElementsSupport() {
+			return null;
+		}
+
+		@Override
+		public FhirContext getFhirContext() {
+			return null;
+		}
+
+		@Override
+		public List<IServerInterceptor> getInterceptors_() {
+			return null;
+		}
+
+		@Override
+		public IPagingProvider getPagingProvider() {
+			return null;
+		}
+
+		@Override
+		public boolean isDefaultPrettyPrint() {
+			return false;
+		}
+
+		@Override
+		public IInterceptorService getInterceptorService() {
+			return null;
+		}
+	}
+
+	private static class MyInterceptorBroadcaster implements IInterceptorBroadcaster {
+
+		@Override
+		public boolean callHooks(Pointcut thePointcut, HookParams theParams) {
+			return true;
+		}
+
+		@Override
+		public Object callHooksAndReturnObject(Pointcut thePointcut, HookParams theParams) {
+			return null;
+		}
+
+		@Override
+		public boolean hasHooks(Pointcut thePointcut) {
+			return false;
+		}
+	}
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
index adef1a64ce8..0ba24b0bcc0 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
@@ -1,21 +1,20 @@
 package ca.uhn.fhir.jpa.packages;
 
+import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageDao;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
 import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
-import ca.uhn.fhir.util.JsonUtil;
-import org.hl7.fhir.utilities.npm.IPackageCacheManager;
+import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
 import org.hl7.fhir.utilities.npm.NpmPackage;
+import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.List;
 
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.contains;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.fail;
@@ -28,6 +27,16 @@ public class JpaPackageCacheTest extends BaseJpaR4Test {
 	private INpmPackageDao myPackageDao;
 	@Autowired
 	private INpmPackageVersionDao myPackageVersionDao;
+	@Autowired
+	private IInterceptorService myInterceptorService;
+	@Autowired
+	private RequestTenantPartitionInterceptor myRequestTenantPartitionInterceptor;
+
+	@AfterEach
+	public void disablePartitioning() {
+		myPartitionSettings.setPartitioningEnabled(false);
+		myInterceptorService.unregisterInterceptor(myRequestTenantPartitionInterceptor);
+	}
 
 
 	@Test
@@ -53,6 +62,36 @@ public class JpaPackageCacheTest extends BaseJpaR4Test {
 	}
 
+	@Test
+	public void testSaveAndDeletePackagePartitionsEnabled() throws IOException {
+		myPartitionSettings.setPartitioningEnabled(true);
+		myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor);
+
+		try (InputStream stream = IgInstallerDstu3Test.class.getResourceAsStream("/packages/basisprofil.de.tar.gz")) {
+			myPackageCacheManager.addPackageToCache("basisprofil.de", "0.2.40", stream, "basisprofil.de");
+		}
+
+		NpmPackage pkg;
+
+		pkg = myPackageCacheManager.loadPackage("basisprofil.de", null);
+		assertEquals("0.2.40", pkg.version());
+
+		pkg = myPackageCacheManager.loadPackage("basisprofil.de", "0.2.40");
+		assertEquals("0.2.40", pkg.version());
+
+		try {
+			myPackageCacheManager.loadPackage("basisprofil.de", "99");
+			fail();
+		} catch (ResourceNotFoundException e) {
+			assertEquals("Unable to locate package basisprofil.de#99", e.getMessage());
+		}
+
+		PackageDeleteOutcomeJson deleteOutcomeJson = myPackageCacheManager.uninstallPackage("basisprofil.de", "0.2.40");
+		List<String> deleteOutcomeMsgs = deleteOutcomeJson.getMessage();
+		assertEquals("Deleting package basisprofil.de#0.2.40", deleteOutcomeMsgs.get(0));
+	}
+
+
 	@Test
 	public void testSavePackageWithLongDescription() throws IOException {
 		try (InputStream stream = IgInstallerDstu3Test.class.getResourceAsStream("/packages/package-davinci-cdex-0.2.0.tgz")) {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
index a47cda3ade3..2e8b6944700 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
@@ -2,6 +2,8 @@ package ca.uhn.fhir.jpa.packages;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageDao;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
@@ -10,13 +12,18 @@ import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageEntity;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.ReferenceParam;
 import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
+import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
+import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.test.utilities.JettyUtil;
 import ca.uhn.fhir.test.utilities.ProxyUtil;
 import ca.uhn.fhir.util.JsonUtil;
@@ -63,6 +70,8 @@
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class NpmR4Test extends BaseJpaR4Test {
@@ -81,6 +90,10 @@ public class NpmR4Test extends BaseJpaR4Test {
 	@Autowired
 	private INpmPackageVersionResourceDao myPackageVersionResourceDao;
 	private FakeNpmServlet myFakeNpmServlet;
+	@Autowired
+	private IInterceptorService myInterceptorService;
+	@Autowired
+	private RequestTenantPartitionInterceptor myRequestTenantPartitionInterceptor;
 
 	@BeforeEach
 	public void before() throws Exception {
@@ -105,6 +118,8 @@ public class NpmR4Test extends BaseJpaR4Test {
 	public void after() throws Exception {
 		JettyUtil.closeServer(myServer);
 		myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
+		myPartitionSettings.setPartitioningEnabled(false);
+		myInterceptorService.unregisterInterceptor(myRequestTenantPartitionInterceptor);
 	}
 
 
@@ -273,6 +288,47 @@ public class NpmR4Test extends BaseJpaR4Test {
 	}
 
+	@Test
+	public void testInstallR4Package_NonConformanceResources_Partitioned() throws Exception {
+		myPartitionSettings.setPartitioningEnabled(true);
+		myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor);
+		myDaoConfig.setAllowExternalReferences(true);
+
+		byte[] bytes = loadClasspathBytes("/packages/test-organizations-package.tgz");
+		myFakeNpmServlet.myResponses.put("/test-organizations/1.0.0", bytes);
+
+		List<String> resourceList = new ArrayList<>();
+		resourceList.add("Organization");
+		PackageInstallationSpec spec = new PackageInstallationSpec().setName("test-organizations").setVersion("1.0.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
+		spec.setInstallResourceTypes(resourceList);
+		PackageInstallOutcomeJson outcome = igInstaller.install(spec);
+		assertEquals(3, outcome.getResourcesInstalled().get("Organization"));
+
+		// Make sure there is no further communication with the server
+		JettyUtil.closeServer(myServer);
+
+		// Search for the installed resources
+		mySrd = mock(ServletRequestDetails.class);
+		when(mySrd.getTenantId()).thenReturn(JpaConstants.DEFAULT_PARTITION_NAME);
+		when(mySrd.getServer()).thenReturn(mock(RestfulServer.class));
+		when(mySrd.getInterceptorBroadcaster()).thenReturn(mock(IInterceptorBroadcaster.class));
+		runInTransaction(() -> {
+			SearchParameterMap map = SearchParameterMap.newSynchronous();
+			map.add(Organization.SP_IDENTIFIER, new TokenParam("https://github.com/synthetichealth/synthea", "organization1"));
+			IBundleProvider result = myOrganizationDao.search(map, mySrd);
+			assertEquals(1, result.sizeOrThrowNpe());
+			map = SearchParameterMap.newSynchronous();
+			map.add(Organization.SP_IDENTIFIER, new TokenParam("https://github.com/synthetichealth/synthea", "organization2"));
+			result = myOrganizationDao.search(map, mySrd);
+			assertEquals(1, result.sizeOrThrowNpe());
+			map = SearchParameterMap.newSynchronous();
+			map.add(Organization.SP_IDENTIFIER, new TokenParam("https://github.com/synthetichealth/synthea", "organization3"));
+			result = myOrganizationDao.search(map, mySrd);
+			assertEquals(1, result.sizeOrThrowNpe());
+		});
+
+	}
+
 	@Test
 	public void testInstallR4Package_NoIdentifierNoUrl() throws Exception {
 		myDaoConfig.setAllowExternalReferences(true);
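A minimal sketch of the calling pattern this patch applies inside JpaPackageCache and PackageInstallerSvcImpl, for reference. Only PartitionSettings, SystemRequestDetails, and the IFhirResourceDao overloads come from the patch itself; the example class and field names below are illustrative assumptions, not part of this change. When partitioning is enabled, passing a SystemRequestDetails lets RequestPartitionHelperSvc resolve the call to the DEFAULT partition; otherwise the plain DAO overload is used.

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class SystemPartitionWriteExample {

	private final PartitionSettings myPartitionSettings;
	private final IFhirResourceDao<IBaseResource> myDao; // hypothetical DAO, wired up elsewhere

	public SystemPartitionWriteExample(PartitionSettings thePartitionSettings, IFhirResourceDao<IBaseResource> theDao) {
		myPartitionSettings = thePartitionSettings;
		myDao = theDao;
	}

	public void createAsSystem(IBaseResource theResource) {
		if (myPartitionSettings.isPartitioningEnabled()) {
			// SystemRequestDetails is treated as a system request, so the
			// write is routed to the DEFAULT partition instead of failing.
			myDao.create(theResource, new SystemRequestDetails());
		} else {
			// Without partitioning there is no partition to resolve, so the plain call suffices.
			myDao.create(theResource);
		}
	}
}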