Merge branch 'master' into issue-3017-remove-loincxml-file-from-application-used-as-fallback-when-loading-loinc-terminology
commit 4ab60e2f9b
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -73,7 +73,6 @@ ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknow
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.
-
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"

@@ -83,48 +82,41 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate unique index matching query: {1} (existing index belongs to {2}, new unique index created by {3})

-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}

-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.deleteBlockedBecauseDisabled=Resource deletion is not permitted on this server
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithInvalidId=Can not process entity with ID[{0}], this is not a valid FHIR ID
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.incorrectResourceType=Incorrect resource type detected for endpoint, found {0} but expected {1}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedNumericId=Can not create resource with ID[{0}], no resource with this ID exists and clients may only assign IDs which contain at least one non-numeric character
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedId=Can not create resource with ID[{0}], ID must not be supplied on a create (POST) operation
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedIdNotAllowed=No resource exists on this server resource with ID[{0}], and client-assigned IDs are not enabled.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidParameterChain=Invalid parameter chain: {0}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidVersion=Version "{0}" is not valid for resource {1}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.missingBody=No body was supplied in request
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulCreate=Successfully created resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.updateWithNoId=Can not update resource of type {0} as it has no ID

+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
+ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteBlockedBecauseDisabled=Resource deletion is not permitted on this server
+ca.uhn.fhir.jpa.dao.BaseStorageDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithInvalidId=Can not process entity with ID[{0}], this is not a valid FHIR ID
+ca.uhn.fhir.jpa.dao.BaseStorageDao.incorrectResourceType=Incorrect resource type detected for endpoint, found {0} but expected {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedNumericId=Can not create resource with ID[{0}], no resource with this ID exists and clients may only assign IDs which contain at least one non-numeric character
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedId=Can not create resource with ID[{0}], ID must not be supplied on a create (POST) operation
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedIdNotAllowed=No resource exists on this server resource with ID[{0}], and client-assigned IDs are not enabled.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidParameterChain=Invalid parameter chain: {0}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidVersion=Version "{0}" is not valid for resource {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
+ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody=No body was supplied in request
+ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}" in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.updateWithNoId=Can not update resource of type {0} as it has no ID
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidBundleTypeForStorage=Unable to store a Bundle resource on this server with a Bundle.type value of: {0}. Note that if you are trying to perform a FHIR 'transaction' or 'batch' operation you should POST the Bundle resource to the Base URL of the server, not to the '/Bundle' endpoint.

 ca.uhn.fhir.rest.api.PatchTypeEnum.missingPatchContentType=Missing or invalid content type for PATCH operation
 ca.uhn.fhir.rest.api.PatchTypeEnum.invalidPatchContentType=Invalid Content-Type for PATCH operation: {0}
 ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.unsupportedResourceType=Resource {0} is not supported on this server. Supported resource types: {1}
-ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
-ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request
-ca.uhn.fhir.jpa.dao.TransactionProcessor.fhirPatchShouldNotUseBinaryResource=Binary PATCH detected with FHIR content type. FHIR Patch should use Parameters resource.

+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.missingPatchBody=Unable to determine PATCH body from request
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.fhirPatchShouldNotUseBinaryResource=Binary PATCH detected with FHIR content type. FHIR Patch should use Parameters resource.
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidInsertIndex=Invalid insert index {0} for path {1} - Only have {2} existing entries
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidMoveSourceIndex=Invalid move source index {0} for path {1} - Only have {2} existing entries
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidMoveDestinationIndex=Invalid move destination index {0} for path {1} - Only have {2} existing entries

 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references
 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1}

 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.sourceParamDisabled=The _source parameter is disabled on this server
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -54,6 +54,8 @@ public final class BatchConstants {
 * MDM Clear
 */
 public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
+public static final String BULK_EXPORT_READ_CHUNK_PARAMETER = "readChunkSize";
+public static final String BULK_EXPORT_GROUP_ID_PARAMETER = "groupId";
 /**
 * This Set contains the step names across all job types that are appropriate for
 * someone to look at the write count for that given step in order to determine the
@@ -3,14 +3,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -1,5 +1,5 @@
 ---
 type: change
 issue: 2446
-title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
+title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
 now defaults to `true`."
@@ -9,7 +9,7 @@ See the [Modules Page](/docs/getting_started/modules.html) for more information
 * [Model API (R5)](/apidocs/hapi-fhir-structures-r5/) - hapi-fhir-structures-r5
 * [Client API](/apidocs/hapi-fhir-client/) - hapi-fhir-client
 * [Plain Server API](/apidocs/hapi-fhir-server/) - hapi-fhir-server
-* [JPA Server - API](/apidocs/hapi-fhir-storage-api/) - hapi-fhir-storage-api
+* [JPA Server - API](/apidocs/hapi-fhir-storage/) - hapi-fhir-storage
 * [JPA Server - Model](/apidocs/hapi-fhir-jpaserver-model/) - hapi-fhir-jpaserver-model
 * [JPA Server - Base](/apidocs/hapi-fhir-jpaserver-base/) - hapi-fhir-jpaserver-base
 * [Version Converter API](/apidocs/hapi-fhir-converter/) - hapi-fhir-converter
@@ -61,7 +61,7 @@ but rather that this is an identifier for a ValueSet where `ValueSet.url` has th

 HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
 .

 For example:
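The documentation's own example follows outside this hunk. As a rough, hypothetical sketch of the setting described above (the URL pattern is an invented placeholder; the setter signature comes from the JavaDoc link in the text), configuring logical references might look like:

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

import java.util.HashSet;
import java.util.Set;

public class LogicalReferenceConfigSketch {

	// Sketch only: "http://example.com/identifiers/*" is a made-up pattern.
	public DaoConfig daoConfigTreatingReferencesAsLogical() {
		DaoConfig daoConfig = new DaoConfig();
		Set<String> logicalUrlPatterns = new HashSet<>();
		logicalUrlPatterns.add("http://example.com/identifiers/*");
		daoConfig.setTreatReferencesAsLogical(logicalUrlPatterns);
		return daoConfig;
	}
}
```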
@@ -137,5 +137,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100

 Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
 ?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
 property.
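As a hedged illustration of the batch-size setting referenced above (this assumes a matching setExpungeBatchSize setter exists alongside the getExpungeBatchSize getter linked in the text; the value 500 is arbitrary):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class ExpungeBatchSizeSketch {

	// Sketch only: 500 is an arbitrary example, not a recommended value.
	public DaoConfig daoConfigWithCustomExpungeBatchSize() {
		DaoConfig daoConfig = new DaoConfig();
		daoConfig.setExpungeBatchSize(500);
		return daoConfig;
	}
}
```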
@@ -26,7 +26,7 @@ The grouping of Observation resources by `Observation.code` means that the `$las

 The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
 .

 In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters
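A minimal sketch of flipping the flag named above; the Elasticsearch wiring (ElasticsearchSvcImpl and its parameters) is deliberately omitted and would still be required:

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class LastNConfigSketch {

	// Sketch only: enables $lastn; ElasticsearchSvcImpl must be configured separately.
	public DaoConfig daoConfigWithLastNEnabled() {
		DaoConfig daoConfig = new DaoConfig();
		daoConfig.setLastNEnabled(true);
		return daoConfig;
	}
}
```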
@@ -246,11 +246,11 @@ an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE**
 visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.

 If the server has been configured with
-a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
-of [UUID](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or
-the server has been configured with
-a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
-of [ANY](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
+a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the
+server has been configured with
+a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
+of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
 the server will create a Forced ID for all resources (not only resources having textual IDs).

 ## Columns
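Both strategies mentioned above are DaoConfig settings. A hedged sketch using the method and enum names from the JavaDoc links in the text (either setting on its own is enough to trigger the behaviour described):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class IdStrategySketch {

	// Sketch only: combines the two configurations the docs describe.
	public DaoConfig daoConfigForcingIdsForAllResources() {
		DaoConfig daoConfig = new DaoConfig();
		// Server-assigned IDs become UUIDs rather than sequential numbers
		daoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
		// Clients may supply any ID, including purely numeric ones
		daoConfig.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.ANY);
		return daoConfig;
	}
}
```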
@@ -26,8 +26,8 @@ One important caveat is that chaining is currently not supported when using this
 ## Enabling MDM Expansion

 On top of needing to instantiate an MDM module, you must enable this feature in
-the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
-the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
+the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
+the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
 property.

 <div class="helpWarningCalloutBox">
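A minimal sketch of the flag described above (instantiating the MDM module itself is out of scope here):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class MdmExpansionSketch {

	// Sketch only: allows :mdm expansion; an MDM module must still be configured.
	public DaoConfig daoConfigAllowingMdmExpansion() {
		DaoConfig daoConfig = new DaoConfig();
		daoConfig.setAllowMdmExpansion(true);
		return daoConfig;
	}
}
```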
@@ -601,7 +601,7 @@ This operation takes two optional Parameters.
 <td>0..1</td>
 <td>
 The number of links that should be deleted at a time. If ommitted, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
 property.
 </td>
 </tr>
@@ -57,7 +57,7 @@ This fact can have security implications:
 in use in another partition.

 * In a server using the default configuration of
-SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
 a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.

 These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
@@ -38,7 +38,12 @@ This means that:

 # Using the Repository Validating Interceptor

-Using the repository validating interceptor is as simple as creating a new instance of [RepositoryValidatingInterceptor](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.html) and registering it with the interceptor registry. The only tricky part is initializing your rules, which must be done using a [RepositoryValidatingRuleBuilder](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.html).
+Using the repository validating interceptor is as simple as creating a new instance
+of [RepositoryValidatingInterceptor](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.html)
+and registering it with the interceptor registry. The only tricky part is initializing your rules, which must be done
+using
+a [RepositoryValidatingRuleBuilder](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.html)
+.

 The rule builder must be obtained from the Spring context, as shown below:
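The snippet the documentation points to is not part of this hunk. As a hedged sketch of the registration pattern described above (the Patient profile URL is invented, and the rule-builder calls should be checked against the RepositoryValidatingRuleBuilder JavaDoc):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.interceptor.validation.IRepositoryValidatingRule;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import org.springframework.context.ApplicationContext;

import java.util.List;

public class RepositoryValidationSketch {

	// Sketch only: the rule builder is obtained from the Spring context, as the docs state.
	public void register(ApplicationContext appCtx, FhirContext fhirContext, IInterceptorService interceptorService) {
		RepositoryValidatingRuleBuilder ruleBuilder = appCtx.getBean(RepositoryValidatingRuleBuilder.class);
		ruleBuilder
			.forResourcesOfType("Patient")
			.requireAtLeastProfile("http://example.org/StructureDefinition/example-patient");
		List<IRepositoryValidatingRule> rules = ruleBuilder.build();

		RepositoryValidatingInterceptor interceptor = new RepositoryValidatingInterceptor(fhirContext, rules);
		interceptorService.registerInterceptor(interceptor);
	}
}
```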
@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -118,7 +118,7 @@
 </dependency>
 <dependency>
 <groupId>ca.uhn.hapi.fhir</groupId>
-<artifactId>hapi-fhir-storage-api</artifactId>
+<artifactId>hapi-fhir-storage</artifactId>
 <version>${project.version}</version>
 </dependency>
 </dependencies>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -123,6 +123,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
 import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
 import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
+import ca.uhn.fhir.jpa.search.reindex.BlockPolicy;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;

@@ -217,7 +218,6 @@ public abstract class BaseConfig {
 public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider";
 public static final String SEARCH_BUILDER = "SearchBuilder";
 public static final String HISTORY_BUILDER = "HistoryBuilder";
-public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder";
 private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
 @Autowired
 protected Environment myEnv;

@@ -404,7 +404,7 @@ public abstract class BaseConfig {
 asyncTaskExecutor.setQueueCapacity(0);
 asyncTaskExecutor.setAllowCoreThreadTimeOut(true);
 asyncTaskExecutor.setThreadNamePrefix("JobLauncher-");
-asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy());
+asyncTaskExecutor.setRejectedExecutionHandler(new BlockPolicy());
 asyncTaskExecutor.initialize();
 return asyncTaskExecutor;
 }

@@ -639,7 +639,7 @@ public abstract class BaseConfig {
 return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest);
 }

-@Bean(name = REPOSITORY_VALIDATING_RULE_BUILDER)
+@Bean(name = RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER)
 @Scope("prototype")
 public RepositoryValidatingRuleBuilder repositoryValidatingRuleBuilder() {
 return new RepositoryValidatingRuleBuilder();
@@ -6,10 +6,9 @@ import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2;
-import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
 import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
 import ca.uhn.fhir.jpa.util.ResourceCountCache;

@@ -97,7 +96,7 @@ public class BaseDstu2Config extends BaseConfig {
 }

 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterDstu2();
 }
@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.dstu3.TransactionProcessorVersionAdapterDstu3;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;

@@ -83,7 +83,7 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus {
 }

 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterDstu3();
 }
@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;

@@ -78,7 +78,7 @@ public class BaseR4Config extends BaseConfigDstu3Plus {
 }

 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterR4();
 }
@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.r5.TransactionProcessorVersionAdapterR5;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;

@@ -76,7 +76,7 @@ public class BaseR5Config extends BaseConfigDstu3Plus {
 }

 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterR5();
 }
@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
 import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.delete.DeleteConflictService;
 import ca.uhn.fhir.jpa.entity.PartitionEntity;
 import ca.uhn.fhir.jpa.entity.ResourceSearchView;

@@ -77,7 +78,6 @@ import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;

@@ -181,14 +181,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 public static final long INDEX_STATUS_INDEXED = 1L;
 public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
 public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
-public static final String OO_SEVERITY_ERROR = "error";
-public static final String OO_SEVERITY_INFO = "information";
-public static final String OO_SEVERITY_WARN = "warning";
-public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = BaseHapiFhirDao.class.getName() + "_RESOLVED_TAG_DEFINITIONS";
-public static final String XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS = BaseHapiFhirDao.class.getName() + "_EXISTING_SEARCH_PARAMS";
 private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
 private static final Map<FhirVersionEnum, FhirContext> ourRetrievalContexts = new HashMap<>();
-private static final String PROCESSING_SUB_REQUEST = "BaseHapiFhirDao.processingSubRequest";
 private static boolean ourValidationDisabledForUnitTest;
 private static boolean ourDisableIncrementOnUpdateForUnitTest = false;

@@ -429,7 +423,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

 if (retVal == null) {
-HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
+HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
 retVal = resolvedTagDefinitions.get(key);

 if (retVal == null) {

@@ -508,19 +502,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 return LogicalReferenceHelper.isLogicalReference(myConfig.getModelConfig(), theId);
 }

-public void notifyInterceptors(RestOperationTypeEnum theOperationType, ActionRequestDetails theRequestDetails) {
-if (theRequestDetails.getId() != null && theRequestDetails.getId().hasResourceType() && isNotBlank(theRequestDetails.getResourceType())) {
-if (theRequestDetails.getId().getResourceType().equals(theRequestDetails.getResourceType()) == false) {
-throw new InternalErrorException(
-"Inconsistent server state - Resource types don't match: " + theRequestDetails.getId().getResourceType() + " / " + theRequestDetails.getResourceType());
-}
-}
-
-if (theRequestDetails.getUserData().get(PROCESSING_SUB_REQUEST) == Boolean.TRUE) {
-theRequestDetails.notifyIncomingRequestPreHandled(theOperationType);
-}
-}
-
 /**
 * Returns true if the resource has changed (either the contents or the tags)
 */

@@ -1195,7 +1176,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

 // CREATE or UPDATE

-IdentityHashMap<ResourceTable, ResourceIndexedSearchParams> existingSearchParams = theTransactionDetails.getOrCreateUserData(XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>());
+IdentityHashMap<ResourceTable, ResourceIndexedSearchParams> existingSearchParams = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>());
 existingParams = existingSearchParams.get(entity);
 if (existingParams == null) {
 existingParams = new ResourceIndexedSearchParams(entity);

@@ -1682,18 +1663,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 return retVal.toString();
 }

-public static void clearRequestAsProcessingSubRequest(RequestDetails theRequestDetails) {
-if (theRequestDetails != null) {
-theRequestDetails.getUserData().remove(PROCESSING_SUB_REQUEST);
-}
-}
-
-public static void markRequestAsProcessingSubRequest(RequestDetails theRequestDetails) {
-if (theRequestDetails != null) {
-theRequestDetails.getUserData().put(PROCESSING_SUB_REQUEST, Boolean.TRUE);
-}
-}
-
 public static void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity) {
 if (theEntity.getDeleted() != null) {
 theEntity.setNarrativeText(null);
@@ -36,7 +36,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
 import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
-import ca.uhn.fhir.jpa.delete.DeleteConflictService;
+import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
 import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
 import ca.uhn.fhir.jpa.model.entity.BaseTag;
 import ca.uhn.fhir.jpa.model.entity.ForcedId;

@@ -223,7 +223,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 */
 protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
 if (theResource == null) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "missingBody");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody");
 throw new InvalidRequestException(msg);
 }

@@ -383,7 +383,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(theResource.getIdElement());
 }

-String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
+String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
 outcome.setOperationOutcome(createInfoOperationOutcome(msg));

 String forcedId = null;

@@ -415,7 +415,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }

 protected String getMessageSanitized(String theKey, String theIdPart) {
-return getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, theKey, theIdPart);
+return getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, theKey, theIdPart);
 }

 private boolean isSystemRequest(RequestDetails theRequest) {

@@ -448,7 +448,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 DaoMethodOutcome retVal = delete(theId, deleteConflicts, theRequestDetails, transactionDetails);

-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);

 ourLog.debug("Processed delete on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
 return retVal;

@@ -475,7 +475,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(id);

 IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", 1, 0);
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, 0);
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);

@@ -524,7 +524,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true);

 IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", 1, w.getMillis());
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, w.getMillis());
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);

@@ -547,7 +547,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 return myTransactionService.execute(theRequest, transactionDetails, tx -> {
 DeleteConflictList deleteConflicts = new DeleteConflictList();
 DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
 return outcome;
 });
 }

@@ -657,7 +657,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
 } else {
 oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", deletedResources.size(), w.getMillis());
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", deletedResources.size(), w.getMillis());
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);

@@ -673,7 +673,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 private void validateDeleteEnabled() {
 if (!getConfig().isDeleteEnabled()) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "deleteBlockedBecauseDisabled");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "deleteBlockedBecauseDisabled");
 throw new PreconditionFailedException(msg);
 }
 }

@@ -1267,7 +1267,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 if (theId.hasVersionIdPart()) {
 if (theId.isVersionIdPartValidLong() == false) {
-throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
+throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
 }
 if (entity.getVersion() != theId.getVersionIdPartAsLong()) {
 entity = null;

@@ -1283,7 +1283,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 try {
 entity = q.getSingleResult();
 } catch (NoResultException e) {
-throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
+throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
 }
 }
 }

@@ -1588,12 +1588,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 @Override
 public DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, @Nonnull TransactionDetails theTransactionDetails) {
 if (theResource == null) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "missingBody");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody");
 throw new InvalidRequestException(msg);
 }
 if (!theResource.getIdElement().hasIdPart() && isBlank(theMatchUrl)) {
 String type = myFhirContext.getResourceType(theResource);
-String msg = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "updateWithNoId", type);
+String msg = myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "updateWithNoId", type);
 throw new InvalidRequestException(msg);
 }

@@ -1733,7 +1733,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(id);
 }

-String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
+String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
 outcome.setOperationOutcome(createInfoOperationOutcome(msg));

 ourLog.debug(msg);

@@ -1762,7 +1762,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (getConfig().isEnforceReferentialIntegrityOnDelete()) {
 myDeleteConflictService.validateOkToDelete(deleteConflicts, entity, true, theRequest, new TransactionDetails());
 }
-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);

 IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
 return new MethodOutcome(new IdDt(theId.getValue()), oo);

@@ -1790,7 +1790,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (resourceToValidateById != null) {
 result = validator.validateWithResult(resourceToValidateById, options);
 } else {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "cantValidateWithNoResource");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "cantValidateWithNoResource");
 throw new InvalidRequestException(msg);
 }
 } else if (isNotBlank(theRawResource)) {
@@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import java.util.Date;
 import java.util.List;

-public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
+public class TransactionProcessorVersionAdapterDstu2 implements ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
 @Override
 public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) {
 theBundleEntry.getResponse().setStatus(theStatus);
@@ -34,7 +34,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;

@@ -48,9 +48,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import ca.uhn.fhir.jpa.searchparam.util.SourceParam;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterAnd;
 import ca.uhn.fhir.model.api.IQueryParameterOr;
 import ca.uhn.fhir.model.api.IQueryParameterType;

@@ -75,6 +73,8 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import com.google.common.collect.Lists;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -96,7 +96,6 @@ import javax.persistence.criteria.Subquery;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Set;

@@ -347,7 +346,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
 }

 if (!foundChainMatch) {
-throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
+throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
 }

 if (candidateTargetTypes.size() > 1) {

@@ -684,7 +683,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {

 } else {
 Collection<String> validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
-String msg = myContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, validNames);
+String msg = myContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", theParamName, theResourceName, validNames);
 throw new InvalidRequestException(msg);
 }
 }
@@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.r5;
 */

 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import org.hl7.fhir.exceptions.FHIRException;

@@ -34,7 +34,7 @@ import org.hl7.fhir.r5.model.Resource;
 import java.util.Date;
 import java.util.List;

-public class TransactionProcessorVersionAdapterR5 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> {
+public class TransactionProcessorVersionAdapterR5 implements ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> {
 @Override
 public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) {
 theBundleEntry.getResponse().setStatus(theStatus);
@@ -27,16 +27,16 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.model.DeleteConflict;
 import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
 import ca.uhn.fhir.jpa.model.entity.ResourceLink;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.util.OperationOutcomeUtil;
 import com.google.common.annotations.VisibleForTesting;
 import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;

@@ -64,35 +64,6 @@ public class DeleteConflictService {
 @Autowired
 private FhirContext myFhirContext;

-public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
-
-// We want the list of resources that are marked to be the same list even as we
-// drill into conflict resolution stacks.. this allows us to not get caught by
-// circular references
-DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts);
-
-// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
-// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.
-
-DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails);
-
-int retryCount = 0;
-while (outcome != null) {
-int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
-if (!(retryCount < shouldRetryCount)) break;
-newConflicts = new DeleteConflictList(newConflicts);
-outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myDaoConfig.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
-++retryCount;
-}
-theDeleteConflicts.addAll(newConflicts);
-if (retryCount >= MAX_RETRY_ATTEMPTS && !theDeleteConflicts.isEmpty()) {
-IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
-OperationOutcomeUtil.addIssue(myFhirContext, oo, BaseHapiFhirDao.OO_SEVERITY_ERROR, MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, null, "processing");
-throw new ResourceVersionConflictException(MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, oo);
-}
-return retryCount;
-}
-
 private DeleteConflictOutcome findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount, TransactionDetails theTransactionDetails) {
 List<ResourceLink> resultList = myDeleteConflictFinderService.findConflicts(theEntity, theMinQueryResultCount);
 if (resultList.isEmpty()) {

@@ -139,35 +110,33 @@ public class DeleteConflictService {
 }
 }

-public static void validateDeleteConflictsEmptyOrThrowException(FhirContext theFhirContext, DeleteConflictList theDeleteConflicts) {
-IBaseOperationOutcome oo = null;
-String firstMsg = null;
+public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest, TransactionDetails theTransactionDetails) {

-for (DeleteConflict next : theDeleteConflicts) {
+// We want the list of resources that are marked to be the same list even as we
+// drill into conflict resolution stacks.. this allows us to not get caught by
+// circular references
+DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts);

-if (theDeleteConflicts.isResourceIdToIgnoreConflict(next.getTargetId())) {
-continue;
+// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
+// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.

+DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails);

+int retryCount = 0;
+while (outcome != null) {
+int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
+if (!(retryCount < shouldRetryCount)) break;
+newConflicts = new DeleteConflictList(newConflicts);
+outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myDaoConfig.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
+++retryCount;
+}

-String msg = "Unable to delete " +
-next.getTargetId().toUnqualifiedVersionless().getValue() +
-" because at least one resource has a reference to this resource. First reference found was resource " +
-next.getSourceId().toUnqualifiedVersionless().getValue() +
-" in path " +
-next.getSourcePath();

-if (firstMsg == null) {
-firstMsg = msg;
-oo = OperationOutcomeUtil.newInstance(theFhirContext);
+theDeleteConflicts.addAll(newConflicts);
+if (retryCount >= MAX_RETRY_ATTEMPTS && !theDeleteConflicts.isEmpty()) {
+IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
+OperationOutcomeUtil.addIssue(myFhirContext, oo, BaseStorageDao.OO_SEVERITY_ERROR, MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, null, "processing");
|
||||
throw new ResourceVersionConflictException(MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, oo);
|
||||
}
|
||||
OperationOutcomeUtil.addIssue(theFhirContext, oo, BaseHapiFhirDao.OO_SEVERITY_ERROR, msg, null, "processing");
|
||||
}
|
||||
|
||||
if (firstMsg == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new ResourceVersionConflictException(firstMsg, oo);
|
||||
return retryCount;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderToken;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
|
||||
|
@ -109,11 +109,9 @@ import java.util.Collection;
|
|||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -443,7 +441,7 @@ public class QueryStack {
|
|||
RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName);
|
||||
if (searchParam == null) {
|
||||
Collection<String> validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", paramName, theResourceName, validNames);
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", paramName, theResourceName, validNames);
|
||||
throw new InvalidRequestException(msg);
|
||||
}
|
||||
RestSearchParameterTypeEnum typeEnum = searchParam.getParamType();
|
||||
|
@ -1204,7 +1202,7 @@ public class QueryStack {
|
|||
}
|
||||
|
||||
} else {
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
|
||||
throw new InvalidRequestException(msg);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
|
||||
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
import ca.uhn.fhir.jpa.dao.IResultIterator;
|
||||
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
|
||||
|
@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
|||
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
|
||||
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
|
||||
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
|
||||
import ca.uhn.fhir.jpa.search.SearchConstants;
|
||||
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
|
||||
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
|
||||
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
|
||||
|
@ -138,7 +139,8 @@ public class SearchBuilder implements ISearchBuilder {
|
|||
* for an explanation of why we use the constant 800
|
||||
*/
|
||||
// NB: keep public
|
||||
public static final int MAXIMUM_PAGE_SIZE = 800;
|
||||
@Deprecated
|
||||
public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
|
||||
public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
|
||||
private static final ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L);
|
||||
|
@ -547,7 +549,7 @@ public class SearchBuilder implements ISearchBuilder {
|
|||
|
||||
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
|
||||
if (param == null) {
|
||||
String msg = myContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSortParameter", theSort.getParamName(), getResourceName(), mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()));
|
||||
String msg = myContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSortParameter", theSort.getParamName(), getResourceName(), mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()));
|
||||
throw new InvalidRequestException(msg);
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
|||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
|
||||
|
@ -400,7 +400,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
|
|||
}
|
||||
|
||||
if (!foundChainMatch) {
|
||||
throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
|
||||
throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
|
||||
}
|
||||
|
||||
candidateTargetTypes.add(nextType);
|
||||
|
@ -414,7 +414,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
|
|||
}
|
||||
|
||||
if (candidateTargetTypes.isEmpty()) {
|
||||
throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
|
||||
throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
|
||||
}
|
||||
|
||||
if (candidateTargetTypes.size() > 1) {
|
||||
|
|
|
@ -42,7 +42,6 @@ import org.apache.commons.lang3.Validate;
|
|||
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import javax.annotation.Nullable;
|
||||
import org.quartz.JobExecutionContext;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -54,6 +53,7 @@ import org.springframework.transaction.TransactionDefinition;
|
|||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
@ -142,29 +142,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A handler for rejected tasks that will have the caller block until space is available.
|
||||
* This was stolen from old hibernate search(5.X.X), as it has been removed in HS6. We can probably come up with a better solution though.
|
||||
*/
|
||||
public static class BlockPolicy implements RejectedExecutionHandler {
|
||||
|
||||
/**
|
||||
* Puts the Runnable to the blocking queue, effectively blocking the delegating thread until space is available.
|
||||
*
|
||||
* @param r the runnable task requested to be executed
|
||||
* @param e the executor attempting to execute this task
|
||||
*/
|
||||
@Override
|
||||
public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
|
||||
try {
|
||||
e.getQueue().put( r );
|
||||
} catch (InterruptedException e1) {
|
||||
ourLog.error("Interrupted Execption for task: {}",r, e1 );
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void scheduleJob() {
|
||||
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
|
||||
|
|
|
@ -144,7 +144,7 @@ public class TransactionProcessorTest {
|
|||
}
|
||||
|
||||
@Bean
|
||||
public BaseTransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> versionAdapter() {
|
||||
public ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> versionAdapter() {
|
||||
return new TransactionProcessorVersionAdapterR4();
|
||||
}
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao.dstu2;
|
|||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3Test;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
|
@ -416,15 +416,15 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
|
|||
* If any of this ever fails, it means that one of the OperationOutcome issue severity codes has changed code value across versions. We store the string as a constant, so something will need to
|
||||
* be fixed.
|
||||
*/
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.ERROR.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.INFORMATION.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.WARNING.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.ERROR.getCode(), BaseStorageDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.INFORMATION.getCode(), BaseStorageDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
|
||||
assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.WARNING.getCode(), BaseStorageDao.OO_SEVERITY_WARN);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -5,7 +5,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
|
||||
|
@ -1014,13 +1014,13 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
|
|||
* be fixed.
|
||||
*/
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirDao.OO_SEVERITY_ERROR);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirDao.OO_SEVERITY_INFO);
|
||||
assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
|
||||
assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirDao.OO_SEVERITY_WARN);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package ca.uhn.fhir.jpa.interceptor.validation;
|
||||
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
|
@ -76,7 +75,7 @@ public class RepositoryValidatingInterceptorHttpR4Test extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
private RepositoryValidatingRuleBuilder newRuleBuilder() {
|
||||
return myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
|
||||
return myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
package ca.uhn.fhir.jpa.interceptor.validation;
|
||||
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
|
||||
|
@ -388,7 +387,7 @@ public class RepositoryValidatingInterceptorR4Test extends BaseJpaR4Test {
|
|||
|
||||
|
||||
private RepositoryValidatingRuleBuilder newRuleBuilder() {
|
||||
return myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
|
||||
return myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package ca.uhn.fhir.jpa.interceptor.validation;
|
||||
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
|
||||
|
@ -23,7 +22,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.context.ApplicationContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
@ -118,7 +116,7 @@ public class ValidationMessageSuppressingInterceptorTest extends BaseResourcePro
|
|||
public void testRepositoryValidation() {
|
||||
createPatient(withActiveTrue(), withId("A"));
|
||||
|
||||
List<IRepositoryValidatingRule> rules = myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class)
|
||||
List<IRepositoryValidatingRule> rules = myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class)
|
||||
.forResourcesOfType("Encounter")
|
||||
.requireValidationToDeclaredProfiles().withBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Ignore)
|
||||
.build();
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -22,7 +22,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage-api</artifactId>
|
||||
<artifactId>hapi-fhir-storage</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -29,7 +29,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage-api</artifactId>
|
||||
<artifactId>hapi-fhir-storage</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -142,7 +142,7 @@ public class SearchPreferHandlingInterceptor {
|
|||
// Strict handling
|
||||
List<String> allowedParams = searchParamRetriever.getActiveSearchParams(resourceName).keySet().stream().sorted().distinct().collect(Collectors.toList());
|
||||
HapiLocalizer localizer = theRequestDetails.getFhirContext().getLocalizer();
|
||||
String msg = localizer.getMessage("ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter", paramName, resourceName, allowedParams);
|
||||
String msg = localizer.getMessage("ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter", paramName, resourceName, allowedParams);
|
||||
throw new InvalidRequestException(msg);
|
||||
|
||||
}
|
||||
|
|
|
@ -1,10 +1,29 @@
|
|||
package ca.uhn.fhir.rest.server.interceptor.auth;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR - Server Framework
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE6-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE7-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -20,7 +20,7 @@ import java.sql.SQLException;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* HAPI FHIR Server - SQL Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue