diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index e569f7ca872..8a5d1a6f034 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -27,7 +27,11 @@ import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import javax.annotation.Nonnull;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
/**
* Value for {@link Hook#value()}
@@ -374,7 +378,6 @@ public enum Pointcut {
),
-
/**
* Server Hook:
* This method is called after the server implementation method has been called, but before any attempt
@@ -1323,14 +1326,16 @@ public enum Pointcut {
/**
* Storage Hook:
- * Invoked before an $expunge operation on all data (expungeEverything) is called.
+ * Invoked before a FHIR create operation to request the identification of the partition ID to be associated
+ * with the resource being created. This hook will only be called if partitioning is enabled in the JPA
+ * server.
*
- * Hooks will be passed a reference to a counter containing the current number of records that have been deleted.
- * If the hook deletes any records, the hook is expected to increment this counter by the number of records deleted.
- *
* Hooks may accept the following parameters:
+ *
*
+ *
+ * org.hl7.fhir.instance.model.api.IBaseResource - The resource that will be created and needs a partition ID assigned.
+ *
*
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
@@ -1346,18 +1351,53 @@ public enum Pointcut {
*
*
*
- * Hooks should return an instance of ca.uhn.fhir.jpa.model.entity.TenantId or null.
+ * Hooks should return an instance of ca.uhn.fhir.jpa.model.entity.PartitionId or null.
*
*/
- STORAGE_TENANT_IDENTIFY_CREATE (
+ STORAGE_PARTITION_IDENTIFY_CREATE(
// Return type
- "ca.uhn.fhir.jpa.model.entity.TenantId",
+ "ca.uhn.fhir.jpa.model.entity.PartitionId",
// Params
"org.hl7.fhir.instance.model.api.IBaseResource",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
+ /**
+ * Storage Hook:
+ * Invoked before a FHIR read/access operation (e.g. read/vread, search, history, etc.) to request the
+ * identification of the partition ID to be associated with the resource(s) being accessed. This hook will only be called if
+ * partitioning is enabled in the JPA server.
+ *
+ * Hooks may accept the following parameters:
+ *
+ *
+ *
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated, depending on how early during processing the
+ * exception occurred.
+ *
+ *
+ * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+ * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+ *
+ *
+ *
+ * Hooks should return an instance of ca.uhn.fhir.jpa.model.entity.PartitionId or null.
+ *
+ */
+ STORAGE_PARTITION_IDENTIFY_READ(
+ // Return type
+ "ca.uhn.fhir.jpa.model.entity.PartitionId",
+ // Params
+ "ca.uhn.fhir.rest.api.server.RequestDetails",
+ "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
+ ),
+
/**
* Performance Tracing Hook:
* This hook is invoked when any informational messages generated by the
@@ -1680,12 +1720,12 @@ public enum Pointcut {
*
*
* THIS IS AN EXPERIMENTAL HOOK AND MAY BE REMOVED OR CHANGED WITHOUT WARNING.
- *
- *
+ *
+ *
* Note that this is a performance tracing hook. Use with caution in production
* systems, since calling it may (or may not) carry a cost.
- *
- *
+ *
+ *
* Hooks may accept the following parameters:
*
*
@@ -1759,9 +1799,7 @@ public enum Pointcut {
* This pointcut is used only for unit tests. Do not use in production code as it may be changed or
* removed at any time.
*/
- TEST_RO(BaseServerResponseException.class, String.class.getName(), String.class.getName())
-
- ;
+ TEST_RO(BaseServerResponseException.class, String.class.getName(), String.class.getName());
private final List myParameterTypes;
private final Class> myReturnType;
diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
index bc15d09dc64..a9a7992c8cb 100644
--- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
+++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
@@ -136,5 +136,8 @@ ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.expansionTooLarge=Expansion of ValueSet
ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}
+ca.uhn.fhir.jpa.dao.partition.RequestPartitionHelperService.blacklistedResourceTypeForPartitioning=Resource type {0} can not be partitioned
+
ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference.invalidTargetTypeForChain=Resource type "{0}" is not a valid target type for reference search parameter: {1}
ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference.invalidResourceType=Invalid/unsupported resource type: "{0}"
+
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/atlas/points.json b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/atlas/points.json
index ad2a8e17898..248f9ec787c 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/atlas/points.json
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/atlas/points.json
@@ -20,6 +20,7 @@
"contactName": "Kevin Mayfield",
"contactEmail": "Kevin.mayfield@mayfield-is.co.uk",
"link": "https://data.developer.nhs.uk/ccri/exp",
+ "city": "UK",
"lat": 55.378052,
"lon": -3.435973,
"added": "2019-08-19"
@@ -29,6 +30,7 @@
"contactName": "David Hay",
"contactEmail": "david.hay25@gmail.com",
"link": "http://clinfhir.com",
+ "city": "New Zealand",
"lat": -42.651737,
"lon": 171.926909,
"added": "2019-08-18"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/4_3_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/4_3_0/changes.yaml
index 08691d1826a..d7523c9e168 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/4_3_0/changes.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/4_3_0/changes.yaml
@@ -6,6 +6,7 @@
Hibernate ORM (JPA): 5.4.6 -> 5.4.12
Hibernate Search (JPA): 5.11.3 -> 5.11.5
+
Guava (JPA): 28.0 -> 28.2
"
- item:
issue: "1583"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
index a4f099499ce..0b651cb5814 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
@@ -44,6 +44,7 @@ page.server_jpa.architecture=Architecture
page.server_jpa.configuration=Configuration
page.server_jpa.search=Search
page.server_jpa.performance=Performance
+page.server_jpa.partitioning=Partitioning
page.server_jpa.upgrading=Upgrade Guide
section.interceptors.title=Interceptors
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/partitioning.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/partitioning.md
new file mode 100644
index 00000000000..25f1534d8fb
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/partitioning.md
@@ -0,0 +1,18 @@
+# Partitioning
+
+# Limitations
+
+Partitioning is a relatively new feature in HAPI FHIR and has a number of known limitations. If you are intending to use partitioning for achieving a multi-tenant architecture it is important to carefully consider these limitations.
+
+None of the limitations listed here are considered permanent. Over time the HAPI FHIR team are hoping to make all of these features partition aware.
+
+* **Subscriptions may not be partitioned**: All subscriptions must be placed in the default partition, and subscribers will receive deliveries for any matching resources from all partitions.
+
+* **Conformance resources may not be partitioned**: The following resources must be placed in the default partition, and will be shared for any validation activities across all partitions:
+ * StructureDefinition
+ * Questionnaire
+ * ValueSet
+ * CodeSystem
+ * ConceptMap
+
+* **Bulk Operations are not partition aware**: Bulk export operations will export data across all partitions.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java
index 072565cdc43..528ff4d476c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java
@@ -221,7 +221,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
map.setLastUpdated(new DateRangeParam(job.getSince(), null));
}
- IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null);
+ IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null, null);
storeResultsToFiles(nextCollection, sb, resultIterator, jobResourceCounter, jobStopwatch);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
index 11163796b33..9a2b078b889 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
@@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.bulk.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.dao.partition.RequestPartitionHelperService;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
@@ -231,6 +232,11 @@ public abstract class BaseConfig {
return new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer());
}
+ @Bean
+ public RequestPartitionHelperService requestPartitionHelperService() {
+ return new RequestPartitionHelperService();
+ }
+
@Bean
public PersistenceExceptionTranslationPostProcessor persistenceExceptionTranslationPostProcessor() {
return new PersistenceExceptionTranslationPostProcessor();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 20d30f7b211..0231f9f4dd7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -219,7 +219,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
retVal.setResourceType(theEntity.getResourceType());
retVal.setForcedId(theId.getIdPart());
retVal.setResource(theEntity);
- retVal.setTenantId(theEntity.getTenantId());
+ retVal.setPartitionId(theEntity.getPartitionId());
theEntity.setForcedId(retVal);
}
}
@@ -1095,7 +1095,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
provenance.setResourceHistoryTable(historyEntry);
provenance.setResourceTable(entity);
- provenance.setTenantId(entity.getTenantId());
+ provenance.setPartitionId(entity.getPartitionId());
if (haveRequestId) {
provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index 01ba1fa05f5..28bff67d924 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -24,10 +24,18 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.dao.partition.RequestPartitionHelperService;
import ca.uhn.fhir.jpa.delete.DeleteConflictList;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
-import ca.uhn.fhir.jpa.model.entity.*;
+import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
+import ca.uhn.fhir.jpa.model.entity.BaseTag;
+import ca.uhn.fhir.jpa.model.entity.ForcedId;
+import ca.uhn.fhir.jpa.model.entity.PartitionId;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.entity.TagDefinition;
+import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
@@ -40,18 +48,46 @@ import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.*;
-import ca.uhn.fhir.rest.api.server.*;
-import ca.uhn.fhir.rest.server.exceptions.*;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.EncodingEnum;
+import ca.uhn.fhir.rest.api.MethodOutcome;
+import ca.uhn.fhir.rest.api.PatchTypeEnum;
+import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
+import ca.uhn.fhir.rest.api.ValidationModeEnum;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
+import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
+import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.StopWatch;
-import ca.uhn.fhir.validation.*;
+import ca.uhn.fhir.validation.FhirValidator;
+import ca.uhn.fhir.validation.IInstanceValidatorModule;
+import ca.uhn.fhir.validation.IValidationContext;
+import ca.uhn.fhir.validation.IValidatorModule;
+import ca.uhn.fhir.validation.ValidationOptions;
+import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.lang3.Validate;
-import org.hl7.fhir.instance.model.api.*;
+import org.hl7.fhir.instance.model.api.IBaseCoding;
+import org.hl7.fhir.instance.model.api.IBaseMetaType;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.transaction.PlatformTransactionManager;
@@ -67,7 +103,14 @@ import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -94,6 +137,8 @@ public abstract class BaseHapiFhirResourceDao extends B
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
private Class myResourceType;
+ @Autowired
+ private RequestPartitionHelperService myRequestPartitionHelperService;
@Override
public void addTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel, RequestDetails theRequest) {
@@ -161,7 +206,8 @@ public abstract class BaseHapiFhirResourceDao extends B
theResource.setId(UUID.randomUUID().toString());
}
- return doCreate(theResource, theIfNoneExist, thePerformIndexing, theUpdateTimestamp, theRequestDetails);
+ PartitionId partitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource);
+ return doCreate(theResource, theIfNoneExist, thePerformIndexing, theUpdateTimestamp, theRequestDetails, partitionId);
}
@Override
@@ -183,7 +229,7 @@ public abstract class BaseHapiFhirResourceDao extends B
validateIdPresentForDelete(theId);
validateDeleteEnabled();
- final ResourceTable entity = readEntityLatestVersion(theId);
+ final ResourceTable entity = readEntityLatestVersion(theId, theRequest);
if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) {
throw new ResourceVersionConflictException("Trying to delete " + theId + " but this is not the current version");
}
@@ -390,7 +436,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
}
- private DaoMethodOutcome doCreate(T theResource, String theIfNoneExist, boolean thePerformIndexing, Date theUpdateTime, RequestDetails theRequest) {
+ private DaoMethodOutcome doCreate(T theResource, String theIfNoneExist, boolean thePerformIndexing, Date theUpdateTime, RequestDetails theRequest, PartitionId thePartitionId) {
StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource);
@@ -398,17 +444,9 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = new ResourceTable();
entity.setResourceType(toResourceName(theResource));
- if (myDaoConfig.isMultiTenancyEnabled()) {
- // Interceptor call: STORAGE_TENANT_IDENTIFY_CREATE
- HookParams params = new HookParams()
- .add(IBaseResource.class, theResource)
- .add(RequestDetails.class, theRequest)
- .addIfMatchesType(ServletRequestDetails.class, theRequest);
- TenantId tenantId = (TenantId) doCallHooksAndReturnObject(theRequest, Pointcut.STORAGE_TENANT_IDENTIFY_CREATE, params);
- if (tenantId != null) {
- ourLog.debug("Resource has been assigned tenant ID: {}", tenantId);
- entity.setTenantId(tenantId);
- }
+ if (thePartitionId != null) {
+ ourLog.debug("Resource has been assigned partition ID: {}", thePartitionId);
+ entity.setPartitionId(thePartitionId);
}
if (isNotBlank(theIfNoneExist)) {
@@ -709,7 +747,7 @@ public abstract class BaseHapiFhirResourceDao extends B
throw new ResourceNotFoundException(theResourceId);
}
- ResourceTable latestVersion = readEntityLatestVersion(theResourceId);
+ ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest);
if (latestVersion.getVersion() != entity.getVersion()) {
doMetaAdd(theMetaAdd, entity);
} else {
@@ -741,7 +779,7 @@ public abstract class BaseHapiFhirResourceDao extends B
throw new ResourceNotFoundException(theResourceId);
}
- ResourceTable latestVersion = readEntityLatestVersion(theResourceId);
+ ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest);
if (latestVersion.getVersion() != entity.getVersion()) {
doMetaDelete(theMetaDel, entity);
} else {
@@ -817,7 +855,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
} else {
- entityToUpdate = readEntityLatestVersion(theId);
+ entityToUpdate = readEntityLatestVersion(theId, theRequest);
if (theId.hasVersionIdPart()) {
if (theId.getVersionIdPartAsLong() != entityToUpdate.getVersion()) {
throw new ResourceVersionConflictException("Version " + theId.getVersionIdPart() + " is not the most recent version of this resource, unable to apply patch");
@@ -944,7 +982,6 @@ public abstract class BaseHapiFhirResourceDao extends B
@Override
public BaseHasResource readEntity(IIdType theId, RequestDetails theRequest) {
-
return readEntity(theId, true, theRequest);
}
@@ -952,9 +989,23 @@ public abstract class BaseHapiFhirResourceDao extends B
public BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest) {
validateResourceTypeAndThrowInvalidRequestException(theId);
- ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(getResourceName(), theId.getIdPart());
+ @Nullable PartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, getResourceName());
+ ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(partitionId, getResourceName(), theId.getIdPart());
BaseHasResource entity = myEntityManager.find(ResourceTable.class, pid.getIdAsLong());
+ // Verify that the resource is for the correct partition
+ if (partitionId != null) {
+ if (entity.getPartitionId() != null) {
+ if (!entity.getPartitionId().getPartitionId().equals(partitionId.getPartitionId())) {
+ ourLog.debug("Performing a read for PartitionId={} but entity has partition: {}", partitionId, entity.getPartitionId());
+ entity = null;
+ }
+ } else {
+ ourLog.debug("Performing a read for PartitionId=null but entity has partition: {}", entity.getPartitionId());
+ entity = null;
+ }
+ }
+
if (entity == null) {
throw new ResourceNotFoundException(theId);
}
@@ -990,8 +1041,17 @@ public abstract class BaseHapiFhirResourceDao extends B
return entity;
}
- protected ResourceTable readEntityLatestVersion(IIdType theId) {
- ResourcePersistentId persistentId = myIdHelperService.resolveResourcePersistentIds(getResourceName(), theId.getIdPart());
+ @NotNull
+ protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails) {
+ PartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, getResourceName());
+ return readEntityLatestVersion(theId, partitionId);
+ }
+
+ @NotNull
+ private ResourceTable readEntityLatestVersion(IIdType theId, @Nullable PartitionId thePartitionId) {
+ validateResourceTypeAndThrowInvalidRequestException(theId);
+
+ ResourcePersistentId persistentId = myIdHelperService.resolveResourcePersistentIds(thePartitionId, getResourceName(), theId.getIdPart());
ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());
if (entity == null) {
throw new ResourceNotFoundException(theId);
@@ -1127,7 +1187,9 @@ public abstract class BaseHapiFhirResourceDao extends B
String uuid = UUID.randomUUID().toString();
SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid);
- try (IResultIterator iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest)) {
+
+ PartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, getResourceName());
+ try (IResultIterator iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, partitionId)) {
while (iter.hasNext()) {
retVal.add(iter.next());
}
@@ -1231,10 +1293,11 @@ public abstract class BaseHapiFhirResourceDao extends B
*/
resourceId = theResource.getIdElement();
+ PartitionId partitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource);
try {
- entity = readEntityLatestVersion(resourceId);
+ entity = readEntityLatestVersion(resourceId, partitionId);
} catch (ResourceNotFoundException e) {
- return doCreate(theResource, null, thePerformIndexing, new Date(), theRequest);
+ return doCreate(theResource, null, thePerformIndexing, new Date(), theRequest, partitionId);
}
}
@@ -1282,10 +1345,6 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable savedEntity = updateInternal(theRequest, theResource, thePerformIndexing, theForceUpdateVersion, entity, resourceId, oldResource);
DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource).setCreated(wasDeleted);
- if (!thePerformIndexing) {
- outcome.setId(theResource.getIdElement());
- }
-
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
@@ -1304,7 +1363,7 @@ public abstract class BaseHapiFhirResourceDao extends B
if (theId == null || theId.hasIdPart() == false) {
throw new InvalidRequestException("No ID supplied. ID is required when validating with mode=DELETE");
}
- final ResourceTable entity = readEntityLatestVersion(theId);
+ final ResourceTable entity = readEntityLatestVersion(theId, theRequest);
// Validate that there are no resources pointing to the candidate that
// would prevent deletion
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
index 2b36bfb3c58..b21a4c883ca 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
@@ -191,7 +191,7 @@ public class DaoConfig {
/**
* @since 5.0.0
*/
- private boolean myMultiTenancyEnabled;
+ private boolean myPartitioningEnabled;
/**
* Constructor
@@ -1944,21 +1944,21 @@ public class DaoConfig {
}
/**
- * If enabled (default is false) the JPA server will support multitenant queries
+ * If enabled (default is false) the JPA server will support data partitioning
*
* @since 5.0.0
*/
- public void setMultiTenancyEnabled(boolean theMultiTenancyEnabled) {
- myMultiTenancyEnabled = theMultiTenancyEnabled;
+ public void setPartitioningEnabled(boolean theMultiTenancyEnabled) {
+ myPartitioningEnabled = theMultiTenancyEnabled;
}
/**
- * If enabled (default is false) the JPA server will support multitenant queries
+ * If enabled (default is false) the JPA server will support data partitioning
*
* @since 5.0.0
*/
- public boolean isMultiTenancyEnabled() {
- return myMultiTenancyEnabled;
+ public boolean isPartitioningEnabled() {
+ return myPartitioningEnabled;
}
public enum StoreMetaSourceInformationEnum {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
index ece01089fe6..f693d694b91 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
@@ -50,7 +50,7 @@ public class FhirResourceDaoSubscriptionDstu2 extends BaseHapiFhirResourceDao suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest) {
@@ -282,7 +287,10 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) {
throw new InvalidRequestException("Invalid context: " + theContext);
}
- ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(contextParts[0], contextParts[1]);
+
+ // FIXME: this method should require a resource type
+ PartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, null);
+ ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(partitionId, contextParts[0], contextParts[1]);
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
index b4752748e78..6bb7b1a69f2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
+import ca.uhn.fhir.jpa.model.entity.PartitionId;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.Include;
@@ -37,12 +38,12 @@ import java.util.Set;
public interface ISearchBuilder {
- IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest);
+ IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest, PartitionId thePartitionId);
+
+ Iterator createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, PartitionId thePartitionId);
void setMaxResultsToFetch(Integer theMaxResultsToFetch);
- Iterator createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest);
-
void loadResourcesByPid(Collection thePids, Collection theIncludedPids, List theResourceListToPopulate, boolean theForHistoryOperation, RequestDetails theDetails);
Set loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection theMatches, Set theRevIncludes, boolean theReverseMode,
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index 891296fba86..2d68d0778a9 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -39,6 +39,7 @@ import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
+import ca.uhn.fhir.jpa.model.entity.PartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedCompositeStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -165,6 +166,7 @@ public class SearchBuilder implements ISearchBuilder {
private Integer myMaxResultsToFetch;
private Set myPidSet;
private PredicateBuilder myPredicateBuilder;
+ private PartitionId myPartitionId;
/**
* Constructor
@@ -181,7 +183,7 @@ public class SearchBuilder implements ISearchBuilder {
}
private void searchForIdsWithAndOr(String theResourceName, String theNextParamName, List> theAndOrParams, RequestDetails theRequest) {
- myPredicateBuilder.searchForIdsWithAndOr(theResourceName, theNextParamName, theAndOrParams, theRequest);
+ myPredicateBuilder.searchForIdsWithAndOr(theResourceName, theNextParamName, theAndOrParams, theRequest, myPartitionId);
}
private void searchForIdsWithAndOr(@Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
@@ -219,8 +221,8 @@ public class SearchBuilder implements ISearchBuilder {
}
@Override
- public Iterator createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest) {
- init(theParams, theSearchUuid);
+ public Iterator createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, PartitionId thePartitionId) {
+ init(theParams, theSearchUuid, thePartitionId);
TypedQuery query = createQuery(null, null, true, theRequest);
return new CountQueryIterator(query);
@@ -235,8 +237,8 @@ public class SearchBuilder implements ISearchBuilder {
}
@Override
- public IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
- init(theParams, theSearchRuntimeDetails.getSearchUuid());
+ public IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest, PartitionId thePartitionId) {
+ init(theParams, theSearchRuntimeDetails.getSearchUuid(), thePartitionId);
if (myPidSet == null) {
myPidSet = new HashSet<>();
@@ -245,13 +247,16 @@ public class SearchBuilder implements ISearchBuilder {
return new QueryIterator(theSearchRuntimeDetails, theRequest);
}
- private void init(SearchParameterMap theParams, String theSearchUuid) {
+ private void init(SearchParameterMap theParams, String theSearchUuid, PartitionId thePartitionId) {
myParams = theParams;
myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
mySearchUuid = theSearchUuid;
myPredicateBuilder = new PredicateBuilder(this, myPredicateBuilderFactory);
+ myPartitionId = thePartitionId;
}
+
+
private TypedQuery createQuery(SortSpec sort, Integer theMaximumResults, boolean theCount, RequestDetails theRequest) {
CriteriaQuery outerQuery;
/*
@@ -296,7 +301,7 @@ public class SearchBuilder implements ISearchBuilder {
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
StringParam idParam = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
- ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(myResourceName, idParam.getValue());
+ ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(myPartitionId, myResourceName, idParam.getValue());
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(1);
}
@@ -352,6 +357,9 @@ public class SearchBuilder implements ISearchBuilder {
myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myResourceType"), myResourceName));
}
myQueryRoot.addPredicate(myCriteriaBuilder.isNull(myQueryRoot.get("myDeleted")));
+ if (myPartitionId != null) {
+ myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myPartitionIdValue"), myPartitionId.getPartitionId()));
+ }
}
// Last updated
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
index 4e1e2c5a39d..52b6688023e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
@@ -39,6 +39,9 @@ public interface IForcedIdDao extends JpaRepository {
@Query("SELECT f.myResourcePid FROM ForcedId f WHERE myResourceType = :resource_type AND myForcedId = :forced_id")
Optional findByTypeAndForcedId(@Param("resource_type") String theResourceType, @Param("forced_id") String theForcedId);
+ @Query("SELECT f.myResourcePid FROM ForcedId f WHERE myPartitionId.myPartitionId = :partition_id AND myResourceType = :resource_type AND myForcedId = :forced_id")
+ Optional findByPartitionIdAndTypeAndForcedId(@Param("partition_id") Integer thePartitionId, @Param("resource_type") String theResourceType, @Param("forced_id") String theForcedId);
+
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
index c0650d840d6..27814d0e92e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
@@ -48,7 +48,7 @@ public class FhirResourceDaoSubscriptionDstu3 extends BaseHapiFhirResourceDao resolveResourceIdentity(theResourceType, theId));
+ String key = thePartitionId + "/" + theResourceType + "/" + theId;
+ retVal = myPersistentIdCache.get(key, t -> resolveResourceIdentity(thePartitionId, theResourceType, theId));
}
} else {
@@ -248,12 +251,18 @@ public class IdHelperService {
return typeToIds;
}
- private Long resolveResourceIdentity(String theResourceType, String theId) {
- Long retVal;
- retVal = myForcedIdDao
- .findByTypeAndForcedId(theResourceType, theId)
- .orElseThrow(() -> new ResourceNotFoundException(new IdDt(theResourceType, theId)));
- return retVal;
+ private Long resolveResourceIdentity(@Nullable PartitionId thePartitionId, @Nonnull String theResourceType, @Nonnull String theId) {
+ Optional pid;
+ if (thePartitionId != null) {
+ pid = myForcedIdDao.findByPartitionIdAndTypeAndForcedId(thePartitionId.getPartitionId(), theResourceType, theId);
+ } else {
+ pid = myForcedIdDao.findByTypeAndForcedId(theResourceType, theId);
+ }
+
+ if (pid.isPresent() == false) {
+ throw new ResourceNotFoundException(new IdDt(theResourceType, theId));
+ }
+ return pid.get();
}
private Collection translateForcedIdToPids(RequestDetails theRequest, Collection theId) {
@@ -274,7 +283,7 @@ public class IdHelperService {
if (!pids.isEmpty()) {
myResourceTableDao.findLookupFieldsByResourcePid(pids)
.stream()
- .map(lookup -> new ResourceLookup((String)lookup[0], (Long)lookup[1], (Date)lookup[2]))
+ .map(lookup -> new ResourceLookup((String) lookup[0], (Long) lookup[1], (Date) lookup[2]))
.forEach(retVal::add);
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/partition/RequestPartitionHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/partition/RequestPartitionHelperService.java
new file mode 100644
index 00000000000..b74f438ebf5
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/partition/RequestPartitionHelperService.java
@@ -0,0 +1,101 @@
+package ca.uhn.fhir.jpa.dao.partition;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.model.entity.PartitionId;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.util.HashSet;
+
+import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooksAndReturnObject;
+
+public class RequestPartitionHelperService {
+
+ private final HashSet