wip tidy implementation

This commit is contained in:
Tadgh 2021-04-16 14:18:01 -04:00
parent fad32aa636
commit f91a4f9576
3 changed files with 19 additions and 19 deletions

View File

@@ -347,7 +347,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) { private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
if (myPartitionSettings.isPartitioningEnabled()) { if (myPartitionSettings.isPartitioningEnabled()) {
SystemRequestDetails requestDetails = new SystemRequestDetails(); SystemRequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME); // requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME);
return theDao.search(theMap, requestDetails); return theDao.search(theMap, requestDetails);
} else { } else {
return theDao.search(theMap); return theDao.search(theMap);

View File

@@ -103,10 +103,6 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
return RequestPartitionId.defaultPartition(); return RequestPartitionId.defaultPartition();
} }
//Shortcircuit and write system calls out to default partition.
if (theRequest instanceof SystemRequestDetails) {
return getSystemRequestPartitionId(theRequest);
}
// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
@@ -118,6 +114,10 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
requestPartitionId = null; requestPartitionId = null;
} }
if (theRequest instanceof SystemRequestDetails) {
requestPartitionId = getSystemRequestPartitionId(theRequest);
}
validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ); validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ);
return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest); return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest);
@@ -159,23 +159,19 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
if (myPartitionSettings.isPartitioningEnabled()) { if (myPartitionSettings.isPartitioningEnabled()) {
//Shortcircuit and write system calls out to default partition.
if (theRequest instanceof SystemRequestDetails) {
return getSystemRequestPartitionId(theRequest);
}
// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
HookParams params = new HookParams()
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
// Handle system requests // Handle system requests
boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType); boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType);
if (nonPartitionableResource && requestPartitionId == null) { if (nonPartitionableResource) {
requestPartitionId = RequestPartitionId.defaultPartition(); requestPartitionId = RequestPartitionId.defaultPartition();
} else if(theRequest instanceof SystemRequestDetails) {
requestPartitionId = getSystemRequestPartitionId(theRequest);
} else {
HookParams params = new HookParams()// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
} }
String resourceName = myFhirContext.getResourceType(theResource); String resourceName = myFhirContext.getResourceType(theResource);

View File

@@ -18,6 +18,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
@@ -1104,6 +1105,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
assertThat(nextContents, is(containsString("IMM999"))); assertThat(nextContents, is(containsString("IMM999")));
assertThat(nextContents, is(not(containsString("Flu")))); assertThat(nextContents, is(not(containsString("Flu"))));
myPartitionSettings.setPartitioningEnabled(false);
} }
private void createResources() { private void createResources() {
@@ -1113,7 +1115,9 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
//Manually create a golden record //Manually create a golden record
Patient goldenPatient = new Patient(); Patient goldenPatient = new Patient();
goldenPatient.setId("PAT999"); goldenPatient.setId("PAT999");
DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, new SystemRequestDetails()); SystemRequestDetails srd = new SystemRequestDetails();
srd.setTenantId(JpaConstants.ALL_PARTITIONS_NAME);
DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, srd);
Long goldenPid = myIdHelperService.getPidOrNull(g1Outcome.getResource()); Long goldenPid = myIdHelperService.getPidOrNull(g1Outcome.getResource());
//Create our golden records' data. //Create our golden records' data.