Merge branch 'master' into 3020_Add_documentation_for_$partition-management-create-partition

commit 8c145d1212
Author: katie_smilecdr
Date: 2021-09-24 13:01:38 -04:00
381 changed files with 5949 additions and 810 deletions

View File

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -73,7 +73,6 @@ ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknow
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
@@ -83,48 +82,41 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate unique index matching query: {1} (existing index belongs to {2}, new unique index created by {3})
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.deleteBlockedBecauseDisabled=Resource deletion is not permitted on this server
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithInvalidId=Can not process entity with ID[{0}], this is not a valid FHIR ID
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.incorrectResourceType=Incorrect resource type detected for endpoint, found {0} but expected {1}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedNumericId=Can not create resource with ID[{0}], no resource with this ID exists and clients may only assign IDs which contain at least one non-numeric character
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedId=Can not create resource with ID[{0}], ID must not be supplied on a create (POST) operation
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.failedToCreateWithClientAssignedIdNotAllowed=No resource exists on this server resource with ID[{0}], and client-assigned IDs are not enabled.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidParameterChain=Invalid parameter chain: {0}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidVersion=Version "{0}" is not valid for resource {1}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.missingBody=No body was supplied in request
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulCreate=Successfully created resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
-ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.updateWithNoId=Can not update resource of type {0} as it has no ID
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
+ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteBlockedBecauseDisabled=Resource deletion is not permitted on this server
+ca.uhn.fhir.jpa.dao.BaseStorageDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithInvalidId=Can not process entity with ID[{0}], this is not a valid FHIR ID
+ca.uhn.fhir.jpa.dao.BaseStorageDao.incorrectResourceType=Incorrect resource type detected for endpoint, found {0} but expected {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedNumericId=Can not create resource with ID[{0}], no resource with this ID exists and clients may only assign IDs which contain at least one non-numeric character
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedId=Can not create resource with ID[{0}], ID must not be supplied on a create (POST) operation
+ca.uhn.fhir.jpa.dao.BaseStorageDao.failedToCreateWithClientAssignedIdNotAllowed=No resource exists on this server resource with ID[{0}], and client-assigned IDs are not enabled.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidParameterChain=Invalid parameter chain: {0}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidVersion=Version "{0}" is not valid for resource {1}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
+ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody=No body was supplied in request
+ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}" in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.updateWithNoId=Can not update resource of type {0} as it has no ID
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidBundleTypeForStorage=Unable to store a Bundle resource on this server with a Bundle.type value of: {0}. Note that if you are trying to perform a FHIR 'transaction' or 'batch' operation you should POST the Bundle resource to the Base URL of the server, not to the '/Bundle' endpoint.
 ca.uhn.fhir.rest.api.PatchTypeEnum.missingPatchContentType=Missing or invalid content type for PATCH operation
 ca.uhn.fhir.rest.api.PatchTypeEnum.invalidPatchContentType=Invalid Content-Type for PATCH operation: {0}
 ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.unsupportedResourceType=Resource {0} is not supported on this server. Supported resource types: {1}
-ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
-ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request
-ca.uhn.fhir.jpa.dao.TransactionProcessor.fhirPatchShouldNotUseBinaryResource=Binary PATCH detected with FHIR content type. FHIR Patch should use Parameters resource.
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.missingPatchBody=Unable to determine PATCH body from request
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.fhirPatchShouldNotUseBinaryResource=Binary PATCH detected with FHIR content type. FHIR Patch should use Parameters resource.
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidInsertIndex=Invalid insert index {0} for path {1} - Only have {2} existing entries
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidMoveSourceIndex=Invalid move source index {0} for path {1} - Only have {2} existing entries
 ca.uhn.fhir.jpa.patch.FhirPatch.invalidMoveDestinationIndex=Invalid move destination index {0} for path {1} - Only have {2} existing entries
 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references
 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.sourceParamDisabled=The _source parameter is disabled on this server
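
These property keys are resolved at runtime through HAPI's localizer, so the owner-class rename above is exactly what callers must match. A minimal lookup sketch against one of the new BaseStorageDao keys, assuming the JPA message bundle is on the classpath:

    import ca.uhn.fhir.context.FhirContext;

    public class LocalizerLookupSketch {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();
            // Fully qualified key = owning class name + "." + key suffix from the file above.
            String msg = ctx.getLocalizer().getMessage("ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody");
            System.out.println(msg); // expected: "No body was supplied in request"
        }
    }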

View File

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -54,6 +54,8 @@ public final class BatchConstants {
 * MDM Clear
 */
 public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
+public static final String BULK_EXPORT_READ_CHUNK_PARAMETER = "readChunkSize";
+public static final String BULK_EXPORT_GROUP_ID_PARAMETER = "groupId";
 /**
 * This Set contains the step names across all job types that are appropriate for
 * someone to look at the write count for that given step in order to determine the
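
The two new constants name bulk-export job parameters. A sketch of how such constants might be consumed when building Spring Batch job parameters; the BatchConstants import path and the parameter values are assumptions:

    import ca.uhn.fhir.jpa.batch.config.BatchConstants; // package assumed
    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    public class BulkExportParamsSketch {
        // Build job parameters from the constants instead of repeating the raw
        // strings "readChunkSize" and "groupId" at each call site.
        static JobParameters bulkExportParams(long theReadChunkSize, String theGroupId) {
            return new JobParametersBuilder()
                    .addLong(BatchConstants.BULK_EXPORT_READ_CHUNK_PARAMETER, theReadChunkSize)
                    .addString(BatchConstants.BULK_EXPORT_GROUP_ID_PARAMETER, theGroupId)
                    .toJobParameters();
        }
    }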

View File

@@ -3,14 +3,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@ -1,5 +1,5 @@
--- ---
type: change type: change
issue: 2446 issue: 2446
title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean)) title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
now defaults to `true`." now defaults to `true`."
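
For servers that relied on the old default, the setting can be switched back explicitly; a minimal sketch, with bean wiring omitted:

    import ca.uhn.fhir.jpa.api.config.DaoConfig;

    public class PlaceholderIdentifierSketch {
        public static void main(String[] args) {
            DaoConfig daoConfig = new DaoConfig();
            // The default is now true; restore the pre-change behaviour if required.
            daoConfig.setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(false);
        }
    }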

View File

@ -0,0 +1,4 @@
---
type: add
issue: 2851
title: "Allows to upload not-current version of LOINC ValueSet(s)."

View File

@ -0,0 +1,4 @@
---
type: change
jira: SMILE-3128
title: "Prevent _expunge and _cascade from being used on the same DELETE operation"

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 3031
jira: SMILE-3178
title: "Fixes a bug that was causing a null pointer exception to be thrown during database migrations that add or drop indexes."

View File

@@ -9,7 +9,7 @@ See the [Modules Page](/docs/getting_started/modules.html) for more information
 * [Model API (R5)](/apidocs/hapi-fhir-structures-r5/) - hapi-fhir-structures-r5
 * [Client API](/apidocs/hapi-fhir-client/) - hapi-fhir-client
 * [Plain Server API](/apidocs/hapi-fhir-server/) - hapi-fhir-server
-* [JPA Server - API](/apidocs/hapi-fhir-storage-api/) - hapi-fhir-storage-api
+* [JPA Server - API](/apidocs/hapi-fhir-storage/) - hapi-fhir-storage
 * [JPA Server - Model](/apidocs/hapi-fhir-jpaserver-model/) - hapi-fhir-jpaserver-model
 * [JPA Server - Base](/apidocs/hapi-fhir-jpaserver-base/) - hapi-fhir-jpaserver-base
 * [Version Converter API](/apidocs/hapi-fhir-converter/) - hapi-fhir-converter

View File

@@ -61,7 +61,7 @@ but rather that this is an identifier for a ValueSet where `ValueSet.url` has th
 HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
 .
 For example:
@@ -137,5 +137,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
 Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
 ?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
 property.
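
A sketch of the DaoConfig call referenced in the first hunk above; the URL pattern is illustrative, and the trailing-asterisk wildcard semantics should be treated as an assumption:

    import ca.uhn.fhir.jpa.api.config.DaoConfig;
    import java.util.HashSet;
    import java.util.Set;

    public class LogicalReferencesSketch {
        public static void main(String[] args) {
            DaoConfig daoConfig = new DaoConfig();
            Set<String> logicalUrls = new HashSet<>();
            logicalUrls.add("http://example.com/ValueSet/*"); // placeholder pattern
            daoConfig.setTreatReferencesAsLogical(logicalUrls);
        }
    }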

View File

@@ -26,7 +26,7 @@ The grouping of Observation resources by `Observation.code` means that the `$las
 The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
 .
 In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters
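
A sketch of enabling the operation; the Elasticsearch wiring noted above is still required and is omitted here:

    import ca.uhn.fhir.jpa.api.config.DaoConfig;

    public class LastNSketch {
        public static void main(String[] args) {
            DaoConfig daoConfig = new DaoConfig();
            daoConfig.setLastNEnabled(true); // $lastn is disabled by default
        }
    }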

View File

@@ -246,11 +246,11 @@ an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE**
 visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
 If the server has been configured with
-a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
-of [UUID](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or
-the server has been configured with
-a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
-of [ANY](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
+a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the
+server has been configured with
+a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
+of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
 the server will create a Forced ID for all resources (not only resources having textual IDs).
 ## Columns

View File

@@ -26,8 +26,8 @@ One important caveat is that chaining is currently not supported when using this
 ## Enabling MDM Expansion
 On top of needing to instantiate an MDM module, you must enable this feature in
-the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
-the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
+the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
+the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
 property.
 <div class="helpWarningCalloutBox">
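
A sketch of turning the property on; the MDM module itself must also be configured, which is not shown:

    import ca.uhn.fhir.jpa.api.config.DaoConfig;

    public class MdmExpansionSketch {
        public static void main(String[] args) {
            DaoConfig daoConfig = new DaoConfig();
            daoConfig.setAllowMdmExpansion(true);
        }
    }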

View File

@@ -601,7 +601,7 @@ This operation takes two optional Parameters.
 <td>0..1</td>
 <td>
 The number of links that should be deleted at a time. If omitted, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
 property.
 </td>
 </tr>

View File

@@ -57,7 +57,7 @@ This fact can have security implications:
 in use in another partition.
 * In a server using the default configuration of
-SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
 a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
 These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
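
A sketch of the hardening suggested above, assuming ClientIdStrategyEnum exposes a NOT_ALLOWED value:

    import ca.uhn.fhir.jpa.api.config.DaoConfig;

    public class PartitionIdStrategySketch {
        public static void main(String[] args) {
            DaoConfig daoConfig = new DaoConfig();
            // Non-sequential server IDs prevent cross-partition ID inference.
            daoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
            // Disallow client-assigned IDs entirely.
            daoConfig.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.NOT_ALLOWED);
        }
    }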

View File

@@ -38,7 +38,12 @@ This means that:
 # Using the Repository Validating Interceptor
-Using the repository validating interceptor is as simple as creating a new instance of [RepositoryValidatingInterceptor](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.html) and registering it with the interceptor registry. The only tricky part is initializing your rules, which must be done using a [RepositoryValidatingRuleBuilder](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.html).
+Using the repository validating interceptor is as simple as creating a new instance
+of [RepositoryValidatingInterceptor](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.html)
+and registering it with the interceptor registry. The only tricky part is initializing your rules, which must be done
+using
+a [RepositoryValidatingRuleBuilder](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.html)
+.
 The rule builder must be obtained from the Spring context, as shown below:
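
A minimal registration sketch, assuming a Spring ApplicationContext that exposes the prototype rule-builder bean; the profile URL is purely illustrative:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.interceptor.api.IInterceptorService;
    import ca.uhn.fhir.jpa.interceptor.validation.IRepositoryValidatingRule;
    import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingInterceptor;
    import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
    import java.util.List;
    import org.springframework.context.ApplicationContext;

    public class RepositoryValidationSketch {
        static void register(ApplicationContext theAppCtx, FhirContext theFhirCtx, IInterceptorService theInterceptorService) {
            // The rule builder is a prototype bean, so fetch a fresh instance.
            RepositoryValidatingRuleBuilder ruleBuilder = theAppCtx.getBean(RepositoryValidatingRuleBuilder.class);
            ruleBuilder.forResourcesOfType("Patient")
                    .requireAtLeastProfile("http://example.com/StructureDefinition/example-patient"); // placeholder
            List<IRepositoryValidatingRule> rules = ruleBuilder.build();
            theInterceptorService.registerInterceptor(new RepositoryValidatingInterceptor(theFhirCtx, rules));
        }
    }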

View File

@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -118,7 +118,7 @@
 </dependency>
 <dependency>
 <groupId>ca.uhn.hapi.fhir</groupId>
-<artifactId>hapi-fhir-storage-api</artifactId>
+<artifactId>hapi-fhir-storage</artifactId>
 <version>${project.version}</version>
 </dependency>
 </dependencies>

View File

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 <modelVersion>4.0.0</modelVersion>

View File

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE6-SNAPSHOT</version>
+<version>5.6.0-PRE7-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -123,6 +123,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
 import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
 import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
+import ca.uhn.fhir.jpa.search.reindex.BlockPolicy;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
@@ -217,7 +218,6 @@ public abstract class BaseConfig {
 public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider";
 public static final String SEARCH_BUILDER = "SearchBuilder";
 public static final String HISTORY_BUILDER = "HistoryBuilder";
-public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder";
 private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
 @Autowired
 protected Environment myEnv;
@@ -404,7 +404,7 @@
 asyncTaskExecutor.setQueueCapacity(0);
 asyncTaskExecutor.setAllowCoreThreadTimeOut(true);
 asyncTaskExecutor.setThreadNamePrefix("JobLauncher-");
-asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy());
+asyncTaskExecutor.setRejectedExecutionHandler(new BlockPolicy());
 asyncTaskExecutor.initialize();
 return asyncTaskExecutor;
 }
@@ -639,7 +639,7 @@
 return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest);
 }
-@Bean(name = REPOSITORY_VALIDATING_RULE_BUILDER)
+@Bean(name = RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER)
 @Scope("prototype")
 public RepositoryValidatingRuleBuilder repositoryValidatingRuleBuilder() {
 return new RepositoryValidatingRuleBuilder();
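
The job-launcher executor above now uses the extracted BlockPolicy as its RejectedExecutionHandler. As a sketch of the pattern (not HAPI's actual source), such a policy typically re-queues the task with a blocking put instead of throwing:

    import java.util.concurrent.RejectedExecutionException;
    import java.util.concurrent.RejectedExecutionHandler;
    import java.util.concurrent.ThreadPoolExecutor;

    // Blocks the submitting thread until queue space is available, rather than
    // rejecting the task as ThreadPoolExecutor.AbortPolicy would.
    public class BlockPolicySketch implements RejectedExecutionHandler {
        @Override
        public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
            try {
                theExecutor.getQueue().put(theRunnable);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RejectedExecutionException("Interrupted while queueing task", e);
            }
        }
    }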

View File

@@ -6,10 +6,9 @@ import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2;
-import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
 import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
 import ca.uhn.fhir.jpa.util.ResourceCountCache;
@@ -97,7 +96,6 @@ public class BaseDstu2Config extends BaseConfig {
 }
 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterDstu2();
 }

View File

@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.dstu3.TransactionProcessorVersionAdapterDstu3;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
@@ -83,7 +83,7 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus {
 }
 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterDstu3();
 }

View File

@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
@@ -78,7 +78,7 @@ public class BaseR4Config extends BaseConfigDstu3Plus {
 }
 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterR4();
 }

View File

@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
 import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.jpa.dao.r5.TransactionProcessorVersionAdapterR5;
 import ca.uhn.fhir.jpa.provider.GraphQLProvider;
 import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
@@ -76,7 +76,7 @@ public class BaseR5Config extends BaseConfigDstu3Plus {
 }
 @Bean
-public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
+public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
 return new TransactionProcessorVersionAdapterR5();
 }

View File

@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
 import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.delete.DeleteConflictService;
 import ca.uhn.fhir.jpa.entity.PartitionEntity;
 import ca.uhn.fhir.jpa.entity.ResourceSearchView;
@@ -77,7 +78,6 @@ import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
@@ -181,14 +181,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 public static final long INDEX_STATUS_INDEXED = 1L;
 public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
 public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
-public static final String OO_SEVERITY_ERROR = "error";
-public static final String OO_SEVERITY_INFO = "information";
-public static final String OO_SEVERITY_WARN = "warning";
-public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = BaseHapiFhirDao.class.getName() + "_RESOLVED_TAG_DEFINITIONS";
-public static final String XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS = BaseHapiFhirDao.class.getName() + "_EXISTING_SEARCH_PARAMS";
 private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
 private static final Map<FhirVersionEnum, FhirContext> ourRetrievalContexts = new HashMap<>();
-private static final String PROCESSING_SUB_REQUEST = "BaseHapiFhirDao.processingSubRequest";
 private static boolean ourValidationDisabledForUnitTest;
 private static boolean ourDisableIncrementOnUpdateForUnitTest = false;
@@ -429,7 +423,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 if (retVal == null) {
-HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
+HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
 retVal = resolvedTagDefinitions.get(key);
 if (retVal == null) {
@@ -508,19 +502,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 return LogicalReferenceHelper.isLogicalReference(myConfig.getModelConfig(), theId);
 }
-public void notifyInterceptors(RestOperationTypeEnum theOperationType, ActionRequestDetails theRequestDetails) {
-if (theRequestDetails.getId() != null && theRequestDetails.getId().hasResourceType() && isNotBlank(theRequestDetails.getResourceType())) {
-if (theRequestDetails.getId().getResourceType().equals(theRequestDetails.getResourceType()) == false) {
-throw new InternalErrorException(
-"Inconsistent server state - Resource types don't match: " + theRequestDetails.getId().getResourceType() + " / " + theRequestDetails.getResourceType());
-}
-}
-if (theRequestDetails.getUserData().get(PROCESSING_SUB_REQUEST) == Boolean.TRUE) {
-theRequestDetails.notifyIncomingRequestPreHandled(theOperationType);
-}
-}
 /**
 * Returns true if the resource has changed (either the contents or the tags)
 */
@@ -1195,7 +1176,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 // CREATE or UPDATE
-IdentityHashMap<ResourceTable, ResourceIndexedSearchParams> existingSearchParams = theTransactionDetails.getOrCreateUserData(XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>());
+IdentityHashMap<ResourceTable, ResourceIndexedSearchParams> existingSearchParams = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>());
 existingParams = existingSearchParams.get(entity);
 if (existingParams == null) {
 existingParams = new ResourceIndexedSearchParams(entity);
@@ -1682,18 +1663,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 return retVal.toString();
 }
-public static void clearRequestAsProcessingSubRequest(RequestDetails theRequestDetails) {
-if (theRequestDetails != null) {
-theRequestDetails.getUserData().remove(PROCESSING_SUB_REQUEST);
-}
-}
-public static void markRequestAsProcessingSubRequest(RequestDetails theRequestDetails) {
-if (theRequestDetails != null) {
-theRequestDetails.getUserData().put(PROCESSING_SUB_REQUEST, Boolean.TRUE);
-}
-}
 public static void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity) {
 if (theEntity.getDeleted() != null) {
 theEntity.setNarrativeText(null);

View File

@ -36,7 +36,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome; import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService; import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag; import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ForcedId; import ca.uhn.fhir.jpa.model.entity.ForcedId;
@ -86,6 +86,7 @@ import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
import ca.uhn.fhir.rest.server.RestfulServerUtils; import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
@ -136,12 +137,9 @@ import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
@ -171,7 +169,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired @Autowired
private IRequestPartitionHelperSvc myRequestPartitionHelperService; private IRequestPartitionHelperSvc myRequestPartitionHelperService;
@Autowired @Autowired
private HapiTransactionService myTransactionService; protected HapiTransactionService myTransactionService;
@Autowired @Autowired
private MatchUrlService myMatchUrlService; private MatchUrlService myMatchUrlService;
@Autowired @Autowired
@ -223,9 +221,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
/** /**
* Called for FHIR create (POST) operations * Called for FHIR create (POST) operations
*/ */
private DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) { protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
if (theResource == null) { if (theResource == null) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "missingBody");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody");
 throw new InvalidRequestException(msg);
 }
@@ -385,7 +383,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(theResource.getIdElement());
 }
-String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
+String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
 outcome.setOperationOutcome(createInfoOperationOutcome(msg));
 String forcedId = null;
@@ -417,7 +415,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }
 protected String getMessageSanitized(String theKey, String theIdPart) {
-return getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, theKey, theIdPart);
+return getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, theKey, theIdPart);
 }
 private boolean isSystemRequest(RequestDetails theRequest) {
@@ -450,7 +448,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 DaoMethodOutcome retVal = delete(theId, deleteConflicts, theRequestDetails, transactionDetails);
-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
 ourLog.debug("Processed delete on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
 return retVal;
@@ -477,7 +475,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(id);
 IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", 1, 0);
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, 0);
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
@@ -526,7 +524,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true);
 IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", 1, w.getMillis());
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, w.getMillis());
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
@@ -549,7 +547,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 return myTransactionService.execute(theRequest, transactionDetails, tx -> {
 DeleteConflictList deleteConflicts = new DeleteConflictList();
 DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
 return outcome;
 });
 }
@@ -588,6 +586,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 throw new MethodNotAllowedException("_expunge is not enabled on this server: " + getConfig().cannotDeleteExpungeReason());
 }
+if (theUrl.contains(Constants.PARAMETER_CASCADE_DELETE) || (theRequest.getHeader(Constants.HEADER_CASCADE) != null && theRequest.getHeader(Constants.HEADER_CASCADE).equals(Constants.CASCADE_DELETE))) {
+throw new InvalidRequestException("_expunge cannot be used with _cascade");
+}
 List<String> urlsToDeleteExpunge = Collections.singletonList(theUrl);
 try {
 JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), urlsToDeleteExpunge, theRequest);
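
The guard added above makes `_expunge` and `_cascade` mutually exclusive on a conditional delete: if the URL carries the cascade parameter, or the request carries the cascade header, the request fails before the delete-expunge batch job is submitted. A hedged client-side sketch of the rejected shape; the endpoint URL and search parameters below are illustrative only, not part of this change:

// Sketch only: a conditional delete combining _expunge with _cascade is now
// rejected with HTTP 400 before the delete-expunge job is submitted.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;

public class CascadeExpungeExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // hypothetical endpoint
		try {
			client.delete()
				.resourceConditionalByUrl("Patient?active=false&_expunge=true&_cascade=delete")
				.execute();
		} catch (Exception e) {
			// Expect: HTTP 400 "_expunge cannot be used with _cascade"
			System.out.println(e.getMessage());
		}
	}
}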
@@ -655,7 +657,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
 } else {
 oo = OperationOutcomeUtil.newInstance(getContext());
-String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", deletedResources.size(), w.getMillis());
+String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", deletedResources.size(), w.getMillis());
 String severity = "information";
 String code = "informational";
 OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
@@ -671,7 +673,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 private void validateDeleteEnabled() {
 if (!getConfig().isDeleteEnabled()) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "deleteBlockedBecauseDisabled");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "deleteBlockedBecauseDisabled");
 throw new PreconditionFailedException(msg);
 }
 }
@@ -1265,7 +1267,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (theId.hasVersionIdPart()) {
 if (theId.isVersionIdPartValidLong() == false) {
-throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
+throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
 }
 if (entity.getVersion() != theId.getVersionIdPartAsLong()) {
 entity = null;
@@ -1281,7 +1283,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 try {
 entity = q.getSingleResult();
 } catch (NoResultException e) {
-throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
+throw new ResourceNotFoundException(getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
 }
 }
 }
@@ -1586,12 +1588,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 @Override
 public DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, @Nonnull TransactionDetails theTransactionDetails) {
 if (theResource == null) {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "missingBody");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody");
 throw new InvalidRequestException(msg);
 }
 if (!theResource.getIdElement().hasIdPart() && isBlank(theMatchUrl)) {
 String type = myFhirContext.getResourceType(theResource);
-String msg = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "updateWithNoId", type);
+String msg = myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "updateWithNoId", type);
 throw new InvalidRequestException(msg);
 }
@@ -1731,7 +1733,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 outcome.setId(id);
 }
-String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
+String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
 outcome.setOperationOutcome(createInfoOperationOutcome(msg));
 ourLog.debug(msg);
@@ -1760,7 +1762,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (getConfig().isEnforceReferentialIntegrityOnDelete()) {
 myDeleteConflictService.validateOkToDelete(deleteConflicts, entity, true, theRequest, new TransactionDetails());
 }
-DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
 IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
 return new MethodOutcome(new IdDt(theId.getValue()), oo);
@@ -1788,7 +1790,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (resourceToValidateById != null) {
 result = validator.validateWithResult(resourceToValidateById, options);
 } else {
-String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "cantValidateWithNoResource");
+String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "cantValidateWithNoResource");
 throw new InvalidRequestException(msg);
 }
 } else if (isNotBlank(theRawResource)) {
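
The common thread in this file is that every localized message lookup now names BaseStorageDao instead of BaseHapiFhirResourceDao. A minimal sketch of why the call sites had to change, assuming the localizer builds property keys as "<owner class name>.<key>" (moving shared messages to BaseStorageDao means every lookup must name that class):

// Sketch only, not part of the diff.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;

public class LocalizerKeyExample {
	public static void main(String[] args) {
		HapiLocalizer localizer = FhirContext.forR4().getLocalizer();
		// Resolves the property "ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody"
		String msg = localizer.getMessage(BaseStorageDao.class, "missingBody");
		System.out.println(msg);
	}
}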


@@ -24,15 +24,21 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.term.TermReadSvcUtil;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
+import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.SortOrderEnum;
 import ca.uhn.fhir.rest.api.SortSpec;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import com.github.benmanes.caffeine.cache.Cache;
 import com.github.benmanes.caffeine.cache.Caffeine;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -41,6 +47,7 @@ import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.ImplementationGuide;
 import org.hl7.fhir.r4.model.Questionnaire;
 import org.hl7.fhir.r4.model.StructureDefinition;
+import org.hl7.fhir.r4.model.UriType;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,10 +58,13 @@ import javax.annotation.PostConstruct;
 import javax.transaction.Transactional;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL_PLUS_SLASH;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
 /**
  * This class is a {@link IValidationSupport Validation support} module that loads
@@ -71,6 +81,10 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 @Autowired
 private DaoRegistry myDaoRegistry;
+@Autowired
+private ITermReadSvc myTermReadSvc;
 private Class<? extends IBaseResource> myCodeSystemType;
 private Class<? extends IBaseResource> myStructureDefinitionType;
 private Class<? extends IBaseResource> myValueSetType;
@@ -92,14 +106,55 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 @Override
 public IBaseResource fetchCodeSystem(String theSystem) {
+if (TermReadSvcUtil.isLoincNotGenericUnversionedCodeSystem(theSystem)) {
+Optional<IBaseResource> currentCSOpt = getCodeSystemCurrentVersion(new UriType(theSystem));
+if (! currentCSOpt.isPresent()) {
+ourLog.info("Couldn't find current version of CodeSystem: " + theSystem);
+}
+return currentCSOpt.orElse(null);
+}
 return fetchResource(myCodeSystemType, theSystem);
 }
+/**
+ * Obtains the current version of a CodeSystem using the fact that the current
+ * version is always pointed to by the ForcedId for the unversioned CS
+ */
+private Optional<IBaseResource> getCodeSystemCurrentVersion(UriType theUrl) {
+if (! theUrl.getValueAsString().contains(LOINC_LOW)) return Optional.empty();
+return myTermReadSvc.readCodeSystemByForcedId(LOINC_LOW);
+}
 @Override
 public IBaseResource fetchValueSet(String theSystem) {
+if (TermReadSvcUtil.isLoincNotGenericUnversionedValueSet(theSystem)) {
+Optional<IBaseResource> currentVSOpt = getValueSetCurrentVersion(new UriType(theSystem));
+return currentVSOpt.orElseThrow(() -> new ResourceNotFoundException(
+"Unable to find current version of ValueSet for url: " + theSystem));
+}
 return fetchResource(myValueSetType, theSystem);
 }
+/**
+ * Obtains the current version of a ValueSet using the fact that the current
+ * version is always pointed to by the ForcedId for the unversioned VS
+ */
+private Optional<IBaseResource> getValueSetCurrentVersion(UriType theUrl) {
+if (TermReadSvcUtil.mustReturnEmptyValueSet(theUrl.getValueAsString())) return Optional.empty();
+String forcedId = theUrl.getValue().substring(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH.length());
+if (StringUtils.isBlank(forcedId)) return Optional.empty();
+IFhirResourceDao<? extends IBaseResource> valueSetResourceDao = myDaoRegistry.getResourceDao(myValueSetType);
+IBaseResource valueSet = valueSetResourceDao.read(new IdDt("ValueSet", forcedId));
+return Optional.ofNullable(valueSet);
+}
 @Override
 public IBaseResource fetchStructureDefinition(String theUrl) {
 return fetchResource(myStructureDefinitionType, theUrl);
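
The new fetch paths derive a forced (client-assigned) resource ID from an unversioned LOINC ValueSet URL. A standalone sketch of that derivation, assuming the prefix behind LOINC_GENERIC_VALUESET_URL_PLUS_SLASH is "http://loinc.org/vs/":

// Sketch only: mirrors the substring logic in getValueSetCurrentVersion().
public class LoincForcedIdSketch {
	private static final String LOINC_VS_PREFIX = "http://loinc.org/vs/"; // assumed value

	/** Returns the forced ID portion of an unversioned LOINC ValueSet URL, or null. */
	static String forcedIdOf(String theUrl) {
		if (!theUrl.startsWith(LOINC_VS_PREFIX)) {
			return null; // not a LOINC ValueSet URL
		}
		String forcedId = theUrl.substring(LOINC_VS_PREFIX.length());
		return forcedId.isEmpty() ? null : forcedId;
	}

	public static void main(String[] args) {
		System.out.println(forcedIdOf("http://loinc.org/vs/LL1000-0")); // "LL1000-0"
		System.out.println(forcedIdOf("http://loinc.org/vs/"));         // null: generic VS
	}
}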


@@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import java.util.Date;
 import java.util.List;
-public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
+public class TransactionProcessorVersionAdapterDstu2 implements ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
 @Override
 public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) {
 theBundleEntry.getResponse().setStatus(theStatus);


@@ -47,6 +47,13 @@ public interface ITermValueSetDao extends JpaRepository<TermValueSet, Long> {
 @Query(value="SELECT vs FROM TermValueSet vs INNER JOIN ResourceTable r ON r.myId = vs.myResourcePid WHERE vs.myUrl = :url ORDER BY r.myUpdated DESC")
 List<TermValueSet> findTermValueSetByUrl(Pageable thePage, @Param("url") String theUrl);
+/**
+ * The current TermValueSet is not necessarily the last one uploaded anymore; the current VS resource
+ * is pointed to by a specific ForcedId, so we locate the current ValueSet as the one pointing to the current VS resource
+ */
+@Query(value="SELECT vs FROM ForcedId f, TermValueSet vs where f.myForcedId = :forcedId and vs.myResource = f.myResource")
+Optional<TermValueSet> findTermValueSetByForcedId(@Param("forcedId") String theForcedId);
 @Query("SELECT vs FROM TermValueSet vs WHERE vs.myUrl = :url AND vs.myVersion IS NULL")
 Optional<TermValueSet> findTermValueSetByUrlAndNullVersion(@Param("url") String theUrl);
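
The new JPQL joins ForcedId to TermValueSet through the shared resource, so a forced ID resolves directly to its terminology entity. A minimal usage sketch, assuming a Spring context that provides ITermValueSetDao; "loinc-all" is a made-up forced ID used purely for illustration:

// Sketch only, not part of the diff.
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Optional;

@Component
public class CurrentValueSetLookup {

	@Autowired
	private ITermValueSetDao myTermValueSetDao;

	/** Looks up the TermValueSet whose backing resource carries the given forced ID. */
	public Optional<TermValueSet> findCurrent(String theForcedId) {
		return myTermValueSetDao.findTermValueSetByForcedId(theForcedId);
	}
}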


@@ -34,7 +34,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@@ -48,9 +48,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import ca.uhn.fhir.jpa.searchparam.util.SourceParam;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterAnd;
 import ca.uhn.fhir.model.api.IQueryParameterOr;
 import ca.uhn.fhir.model.api.IQueryParameterType;
@@ -75,6 +73,8 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import com.google.common.collect.Lists;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -96,7 +96,6 @@ import javax.persistence.criteria.Subquery;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Set;
@@ -347,7 +346,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
 }
 if (!foundChainMatch) {
-throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
+throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
 }
 if (candidateTargetTypes.size() > 1) {
@@ -684,7 +683,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
 } else {
 Collection<String> validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
-String msg = myContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, validNames);
+String msg = myContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", theParamName, theResourceName, validNames);
 throw new InvalidRequestException(msg);
 }
 }


@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult;
 import ca.uhn.fhir.context.support.ValidationSupportContext;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -46,13 +47,16 @@ import org.hl7.fhir.r4.model.Coding;
 import org.springframework.beans.factory.annotation.Autowired;
 import javax.annotation.Nonnull;
+import java.security.InvalidParameterException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.Set;
 import static ca.uhn.fhir.jpa.dao.FhirResourceDaoValueSetDstu2.toStringOrNull;
+import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
 public class FhirResourceDaoCodeSystemR4 extends BaseHapiFhirResourceDao<CodeSystem> implements IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> {
@@ -173,4 +177,17 @@ public class FhirResourceDaoCodeSystemR4 extends BaseHapiFhirResourceDao<CodeSys
 }
+@Override
+public DaoMethodOutcome create(CodeSystem theResource, String theIfNoneExist, boolean thePerformIndexing,
+@Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
+// loinc CodeSystem must have an ID
+if (isNotBlank(theResource.getUrl()) && theResource.getUrl().contains(LOINC_LOW)
+&& isBlank(theResource.getIdElement().getIdPart())) {
+throw new InvalidParameterException("'loinc' CodeSystem must have an ID");
+}
+return myTransactionService.execute(theRequestDetails, theTransactionDetails,
+tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails));
+}
 }
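
The overridden create() rejects a LOINC CodeSystem that lacks a client-assigned ID, since the current-version lookup keys off that forced ID. A hedged sketch of just the predicate it applies, assuming LOINC_LOW is the lowercase string "loinc":

// Sketch only: mirrors the guard in the create() override above.
import org.hl7.fhir.r4.model.CodeSystem;

public class LoincIdGuardSketch {
	private static final String LOINC_LOW = "loinc"; // assumed value

	static boolean isMissingRequiredId(CodeSystem theCs) {
		String url = theCs.getUrl();
		String idPart = theCs.getIdElement().getIdPart();
		// A LOINC CodeSystem upload must carry a client-assigned ID
		return url != null && url.contains(LOINC_LOW) && (idPart == null || idPart.isEmpty());
	}

	public static void main(String[] args) {
		CodeSystem cs = new CodeSystem();
		cs.setUrl("http://loinc.org");
		System.out.println(isMissingRequiredId(cs)); // true -> create() would throw
		cs.setId("loinc");
		System.out.println(isMissingRequiredId(cs)); // false -> accepted
	}
}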


@@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;


@@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.r5;
 */
 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import org.hl7.fhir.exceptions.FHIRException;
@@ -34,7 +34,7 @@ import org.hl7.fhir.r5.model.Resource;
 import java.util.Date;
 import java.util.List;
-public class TransactionProcessorVersionAdapterR5 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> {
+public class TransactionProcessorVersionAdapterR5 implements ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> {
 @Override
 public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) {
 theBundleEntry.getResponse().setStatus(theStatus);


@@ -27,16 +27,16 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.model.DeleteConflict;
 import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
 import ca.uhn.fhir.jpa.model.entity.ResourceLink;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.util.OperationOutcomeUtil;
 import com.google.common.annotations.VisibleForTesting;
 import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
@@ -64,35 +64,6 @@ public class DeleteConflictService {
 @Autowired
 private FhirContext myFhirContext;
-public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
-// We want the list of resources that are marked to be the same list even as we
-// drill into conflict resolution stacks.. this allows us to not get caught by
-// circular references
-DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts);
-// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
-// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.
-DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails);
-int retryCount = 0;
-while (outcome != null) {
-int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
-if (!(retryCount < shouldRetryCount)) break;
-newConflicts = new DeleteConflictList(newConflicts);
-outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myDaoConfig.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
-++retryCount;
-}
-theDeleteConflicts.addAll(newConflicts);
-if (retryCount >= MAX_RETRY_ATTEMPTS && !theDeleteConflicts.isEmpty()) {
-IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
-OperationOutcomeUtil.addIssue(myFhirContext, oo, BaseHapiFhirDao.OO_SEVERITY_ERROR, MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, null, "processing");
-throw new ResourceVersionConflictException(MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, oo);
-}
-return retryCount;
-}
 private DeleteConflictOutcome findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount, TransactionDetails theTransactionDetails) {
 List<ResourceLink> resultList = myDeleteConflictFinderService.findConflicts(theEntity, theMinQueryResultCount);
 if (resultList.isEmpty()) {
@@ -139,35 +110,33 @@
 }
 }
-public static void validateDeleteConflictsEmptyOrThrowException(FhirContext theFhirContext, DeleteConflictList theDeleteConflicts) {
-IBaseOperationOutcome oo = null;
-String firstMsg = null;
-for (DeleteConflict next : theDeleteConflicts) {
-if (theDeleteConflicts.isResourceIdToIgnoreConflict(next.getTargetId())) {
-continue;
-}
-String msg = "Unable to delete " +
-next.getTargetId().toUnqualifiedVersionless().getValue() +
-" because at least one resource has a reference to this resource. First reference found was resource " +
-next.getSourceId().toUnqualifiedVersionless().getValue() +
-" in path " +
-next.getSourcePath();
-if (firstMsg == null) {
-firstMsg = msg;
-oo = OperationOutcomeUtil.newInstance(theFhirContext);
-}
-OperationOutcomeUtil.addIssue(theFhirContext, oo, BaseHapiFhirDao.OO_SEVERITY_ERROR, msg, null, "processing");
-}
-if (firstMsg == null) {
-return;
-}
-throw new ResourceVersionConflictException(firstMsg, oo);
-}
+public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
+// We want the list of resources that are marked to be the same list even as we
+// drill into conflict resolution stacks.. this allows us to not get caught by
+// circular references
+DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts);
+// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
+// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.
+DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails);
+int retryCount = 0;
+while (outcome != null) {
+int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
+if (!(retryCount < shouldRetryCount)) break;
+newConflicts = new DeleteConflictList(newConflicts);
+outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myDaoConfig.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
+++retryCount;
+}
+theDeleteConflicts.addAll(newConflicts);
+if (retryCount >= MAX_RETRY_ATTEMPTS && !theDeleteConflicts.isEmpty()) {
+IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
+OperationOutcomeUtil.addIssue(myFhirContext, oo, BaseStorageDao.OO_SEVERITY_ERROR, MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, null, "processing");
+throw new ResourceVersionConflictException(MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, oo);
+}
+return retryCount;
+}
 @VisibleForTesting
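
The relocated validateOkToDelete() keeps its bounded retry loop: interceptor hooks may resolve conflicts and ask for another pass, but the count is capped at MAX_RETRY_ATTEMPTS so a misbehaving hook cannot spin forever. A self-contained sketch of that pattern; ConflictHook and the conflict list below are stand-ins for the real interceptor hook and conflict query:

// Sketch only, not part of the diff.
import java.util.ArrayList;
import java.util.List;

public class BoundedRetrySketch {
	static final int MAX_RETRY_ATTEMPTS = 10;

	interface ConflictHook {
		/** Returns a retry budget, or -1 when the hook resolved nothing. */
		int resolve(List<String> conflicts);
	}

	static int resolveConflicts(List<String> conflicts, ConflictHook hook) {
		int retryCount = 0;
		int shouldRetry = hook.resolve(conflicts);
		// Keep re-querying while the hook claims progress, but never past the cap.
		while (shouldRetry >= 0 && retryCount < Math.min(shouldRetry, MAX_RETRY_ATTEMPTS)) {
			conflicts.clear(); // stand-in for re-running the conflict query
			shouldRetry = hook.resolve(conflicts);
			++retryCount;
		}
		return retryCount;
	}

	public static void main(String[] args) {
		List<String> conflicts = new ArrayList<>(List.of("Patient/1 <- Observation/2"));
		int retries = resolveConflicts(conflicts, c -> c.isEmpty() ? -1 : 2);
		System.out.println("Retried " + retries + " time(s)");
	}
}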


@@ -1,9 +1,13 @@
 package ca.uhn.fhir.jpa.provider.r4;
-import java.util.List;
-import javax.servlet.http.HttpServletRequest;
+import ca.uhn.fhir.context.support.IValidationSupport;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderValueSetDstu2;
+import ca.uhn.fhir.rest.annotation.IdParam;
+import ca.uhn.fhir.rest.annotation.Operation;
+import ca.uhn.fhir.rest.annotation.OperationParam;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.r4.model.BooleanType;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.CodeType;
@@ -14,6 +18,9 @@ import org.hl7.fhir.r4.model.Parameters;
 import org.hl7.fhir.r4.model.StringType;
 import org.hl7.fhir.r4.model.UriType;
+import javax.servlet.http.HttpServletRequest;
+import java.util.List;
 /*
  * #%L
  * HAPI FHIR JPA Server
@@ -34,15 +41,6 @@ import org.hl7.fhir.r4.model.UriType;
  * #L%
  */
-import ca.uhn.fhir.context.support.IValidationSupport;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
-import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderValueSetDstu2;
-import ca.uhn.fhir.rest.annotation.IdParam;
-import ca.uhn.fhir.rest.annotation.Operation;
-import ca.uhn.fhir.rest.annotation.OperationParam;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
 public class BaseJpaResourceProviderCodeSystemR4 extends JpaResourceProviderR4<CodeSystem> {
 /**


@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
 import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderToken;
 import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
@@ -109,11 +109,9 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -443,7 +441,7 @@ public class QueryStack {
 RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName);
 if (searchParam == null) {
 Collection<String> validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
-String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", paramName, theResourceName, validNames);
+String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", paramName, theResourceName, validNames);
 throw new InvalidRequestException(msg);
 }
 RestSearchParameterTypeEnum typeEnum = searchParam.getParamType();
@@ -1204,7 +1202,7 @@
 }
 } else {
-String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
+String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
 throw new InvalidRequestException(msg);
 }
 }


@@ -35,7 +35,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
 import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
 import ca.uhn.fhir.jpa.dao.IResultIterator;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
@@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTag;
 import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
 import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.search.SearchConstants;
 import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@@ -138,7 +139,8 @@ public class SearchBuilder implements ISearchBuilder {
  * for an explanation of why we use the constant 800
  */
 // NB: keep public
-public static final int MAXIMUM_PAGE_SIZE = 800;
+@Deprecated
+public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
 public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
 private static final ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L);
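
The page-size constant moves to SearchConstants while the old public field stays behind as a deprecated alias, so downstream code keeps compiling while new code migrates. A generic sketch of that aliasing pattern; the nested classes and the value 800 below are illustrative, mirroring the comment above the field:

// Sketch only, not part of the diff.
public class ConstantsMigration {
	static final class SearchConstants {
		public static final int MAX_PAGE_SIZE = 800; // new canonical location (assumed value)
	}

	static final class SearchBuilder {
		/** @deprecated use {@link SearchConstants#MAX_PAGE_SIZE} instead */
		@Deprecated
		public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
	}

	public static void main(String[] args) {
		// Both resolve to the same value; only the preferred access point changed.
		System.out.println(SearchBuilder.MAXIMUM_PAGE_SIZE == SearchConstants.MAX_PAGE_SIZE);
	}
}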
@@ -547,7 +549,7 @@
 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
 if (param == null) {
-String msg = myContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSortParameter", theSort.getParamName(), getResourceName(), mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()));
+String msg = myContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSortParameter", theSort.getParamName(), getResourceName(), mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()));
 throw new InvalidRequestException(msg);
 }


@@ -34,7 +34,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
 import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
@@ -400,7 +400,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
 }
 if (!foundChainMatch) {
-throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
+throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
 }
 candidateTargetTypes.add(nextType);
@@ -414,7 +414,7 @@
 }
 if (candidateTargetTypes.isEmpty()) {
-throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
+throw new InvalidRequestException(getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain()));
 }
 if (candidateTargetTypes.size() > 1) {


@@ -42,7 +42,6 @@ import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.r4.model.InstantType;
-import javax.annotation.Nullable;
 import org.quartz.JobExecutionContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -54,6 +53,7 @@ import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.support.TransactionCallback;
 import org.springframework.transaction.support.TransactionTemplate;
+import javax.annotation.Nullable;
 import javax.annotation.PostConstruct;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
@@ -142,29 +142,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
 );
 }
-/**
- * A handler for rejected tasks that will have the caller block until space is available.
- * This was stolen from old hibernate search(5.X.X), as it has been removed in HS6. We can probably come up with a better solution though.
- */
-public static class BlockPolicy implements RejectedExecutionHandler {
-/**
- * Puts the Runnable to the blocking queue, effectively blocking the delegating thread until space is available.
- *
- * @param r the runnable task requested to be executed
- * @param e the executor attempting to execute this task
- */
-@Override
-public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
-try {
-e.getQueue().put( r );
-} catch (InterruptedException e1) {
-ourLog.error("Interrupted Execption for task: {}",r, e1 );
-Thread.currentThread().interrupt();
-}
-}
-}
 public void scheduleJob() {
 ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();


@ -28,6 +28,7 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@ -54,6 +55,7 @@ import ca.uhn.fhir.jpa.entity.TermConceptPropertyTypeEnum;
import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
@ -123,6 +125,7 @@ import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice; import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionDefinition;
@ -138,6 +141,7 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
import javax.persistence.NonUniqueResultException;
import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType; import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
@ -157,6 +161,7 @@ import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
@ -176,6 +181,8 @@ import static org.apache.commons.lang3.StringUtils.isNoneBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.lowerCase; import static org.apache.commons.lang3.StringUtils.lowerCase;
import static org.apache.commons.lang3.StringUtils.startsWithIgnoreCase; import static org.apache.commons.lang3.StringUtils.startsWithIgnoreCase;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL_PLUS_SLASH;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
public abstract class BaseTermReadSvcImpl implements ITermReadSvc { public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
public static final int DEFAULT_FETCH_SIZE = 250; public static final int DEFAULT_FETCH_SIZE = 250;
@ -196,7 +203,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
@Autowired @Autowired
protected ITermConceptDesignationDao myConceptDesignationDao; protected ITermConceptDesignationDao myConceptDesignationDao;
@Autowired @Autowired
protected ITermValueSetDao myValueSetDao; protected ITermValueSetDao myTermValueSetDao;
@Autowired @Autowired
protected ITermValueSetConceptDao myValueSetConceptDao; protected ITermValueSetConceptDao myValueSetConceptDao;
@Autowired @Autowired
@ -339,7 +346,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
public void deleteValueSetForResource(ResourceTable theResourceTable) { public void deleteValueSetForResource(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted. // Get existing entity so it can be deleted.
Optional<TermValueSet> optionalExistingTermValueSetById = myValueSetDao.findByResourcePid(theResourceTable.getId()); Optional<TermValueSet> optionalExistingTermValueSetById = myTermValueSetDao.findByResourcePid(theResourceTable.getId());
if (optionalExistingTermValueSetById.isPresent()) { if (optionalExistingTermValueSetById.isPresent()) {
TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get(); TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get();
@ -347,7 +354,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId()); ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId());
myValueSetConceptDesignationDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetConceptDesignationDao.deleteByTermValueSetId(existingTermValueSet.getId());
myValueSetConceptDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetConceptDao.deleteByTermValueSetId(existingTermValueSet.getId());
myValueSetDao.deleteById(existingTermValueSet.getId()); myTermValueSetDao.deleteById(existingTermValueSet.getId());
ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId()); ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId());
} }
} }
@ -443,14 +450,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
Optional<TermValueSet> optionalTermValueSet; Optional<TermValueSet> optionalTermValueSet;
if (theValueSetToExpand.hasUrl()) { if (theValueSetToExpand.hasUrl()) {
if (theValueSetToExpand.hasVersion()) { if (theValueSetToExpand.hasVersion()) {
optionalTermValueSet = myValueSetDao.findTermValueSetByUrlAndVersion(theValueSetToExpand.getUrl(), theValueSetToExpand.getVersion()); optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(theValueSetToExpand.getUrl(), theValueSetToExpand.getVersion());
} else { } else {
List<TermValueSet> termValueSets = myValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 1), theValueSetToExpand.getUrl()); optionalTermValueSet = findCurrentTermValueSet(theValueSetToExpand.getUrl());
if (termValueSets.size() > 0) {
optionalTermValueSet = Optional.of(termValueSets.get(0));
} else {
optionalTermValueSet = Optional.empty();
}
} }
} else { } else {
optionalTermValueSet = Optional.empty(); optionalTermValueSet = Optional.empty();
@ -1455,7 +1457,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
@Override @Override
public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) { public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) {
ResourcePersistentId valueSetResourcePid = myConceptStorageSvc.getValueSetResourcePid(theValueSet.getIdElement()); ResourcePersistentId valueSetResourcePid = myConceptStorageSvc.getValueSetResourcePid(theValueSet.getIdElement());
Optional<TermValueSet> optionalTermValueSet = myValueSetDao.findByResourcePid(valueSetResourcePid.getIdAsLong()); Optional<TermValueSet> optionalTermValueSet = myTermValueSetDao.findByResourcePid(valueSetResourcePid.getIdAsLong());
if (!optionalTermValueSet.isPresent()) { if (!optionalTermValueSet.isPresent()) {
ourLog.warn("ValueSet is not present in terminology tables. Will perform in-memory code validation. {}", getValueSetInfo(theValueSet)); ourLog.warn("ValueSet is not present in terminology tables. Will perform in-memory code validation. {}", getValueSetInfo(theValueSet));
@ -1760,7 +1762,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
TermValueSet termValueSet = optionalTermValueSet.get(); TermValueSet termValueSet = optionalTermValueSet.get();
termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS);
return myValueSetDao.saveAndFlush(termValueSet); return myTermValueSetDao.saveAndFlush(termValueSet);
}); });
if (valueSetToExpand == null) { if (valueSetToExpand == null) {
return; return;
@@ -1769,18 +1771,18 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
		// We have a ValueSet to pre-expand.
		try {
			ValueSet valueSet = txTemplate.execute(t -> {
-				TermValueSet refreshedValueSetToExpand = myValueSetDao.findById(valueSetToExpand.getId()).orElseThrow(() -> new IllegalStateException("Unknown VS ID: " + valueSetToExpand.getId()));
+				TermValueSet refreshedValueSetToExpand = myTermValueSetDao.findById(valueSetToExpand.getId()).orElseThrow(() -> new IllegalStateException("Unknown VS ID: " + valueSetToExpand.getId()));
				return getValueSetFromResourceTable(refreshedValueSetToExpand.getResource());
			});
			assert valueSet != null;
-			ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator(valueSetToExpand, myValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao);
+			ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator(valueSetToExpand, myTermValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao);
			expandValueSet(null, valueSet, accumulator);
			// We are done with this ValueSet.
			txTemplate.execute(t -> {
				valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED);
-				myValueSetDao.saveAndFlush(valueSetToExpand);
+				myTermValueSetDao.saveAndFlush(valueSetToExpand);
				return null;
			});
@@ -1790,7 +1792,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
			ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e);
			txTemplate.execute(t -> {
				valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND);
-				myValueSetDao.saveAndFlush(valueSetToExpand);
+				myTermValueSetDao.saveAndFlush(valueSetToExpand);
				return null;
			});
		}
@@ -1873,7 +1875,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
	private Optional<TermValueSet> getNextTermValueSetNotExpanded() {
		Optional<TermValueSet> retVal = Optional.empty();
-		Slice<TermValueSet> page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED);
+		Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED);
		if (!page.getContent().isEmpty()) {
			retVal = Optional.of(page.getContent().get(0));
@@ -1914,13 +1916,13 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
		String version = termValueSet.getVersion();
		Optional<TermValueSet> optionalExistingTermValueSetByUrl;
		if (version != null) {
-			optionalExistingTermValueSetByUrl = myValueSetDao.findTermValueSetByUrlAndVersion(url, version);
+			optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version);
		} else {
-			optionalExistingTermValueSetByUrl = myValueSetDao.findTermValueSetByUrlAndNullVersion(url);
+			optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url);
		}
		if (!optionalExistingTermValueSetByUrl.isPresent()) {
-			myValueSetDao.save(termValueSet);
+			myTermValueSetDao.save(termValueSet);
		} else {
			TermValueSet existingTermValueSet = optionalExistingTermValueSetByUrl.get();
@@ -2338,6 +2340,29 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
		return codeSystemValidateCode(codeSystemUrl, theVersion, code, display);
	}
	/**
	 * When the search is for an unversioned LOINC system, the ForcedId is used to obtain the current
	 * version, because the current version is no longer necessarily the last one uploaded.
	 * For all other systems the last uploaded version is still considered the current one.
	 */
	@Override
	public Optional<TermValueSet> findCurrentTermValueSet(String theUrl) {
		if (TermReadSvcUtil.isLoincNotGenericUnversionedValueSet(theUrl)) {
			if (TermReadSvcUtil.mustReturnEmptyValueSet(theUrl)) return Optional.empty();
			String forcedId = theUrl.substring(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH.length());
			if (StringUtils.isBlank(forcedId)) return Optional.empty();
			return myTermValueSetDao.findTermValueSetByForcedId(forcedId);
		}
		List<TermValueSet> termValueSetList = myTermValueSetDao.findTermValueSetByUrl(Pageable.ofSize(1), theUrl);
		if (termValueSetList.isEmpty()) return Optional.empty();
		return Optional.of(termValueSetList.get(0));
	}
	@SuppressWarnings("unchecked")
	private CodeValidationResult codeSystemValidateCode(String theCodeSystemUrl, String theCodeSystemVersion, String theCode, String theDisplay) {
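A hedged illustration of what the new resolution rule means for a caller (sketch only, not part of the commit; it assumes an ITermReadSvc wired by Spring as in the tests later in this commit, and the "LL1000-0" URL follows the http://loinc.org/vs/... convention used there):

	import ca.uhn.fhir.jpa.entity.TermValueSet;
	import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
	import java.util.Optional;

	class FindCurrentTermValueSetSketch {
		static void demo(ITermReadSvc termReadSvc) {
			// Unversioned LOINC ValueSet URL: resolved via the ForcedId ("LL1000-0"), not via upload order
			Optional<TermValueSet> loincVs = termReadSvc.findCurrentTermValueSet("http://loinc.org/vs/LL1000-0");
			// Any other URL: the most recently uploaded version is still treated as current
			Optional<TermValueSet> otherVs = termReadSvc.findCurrentTermValueSet("http://example.org/fhir/ValueSet/foo");
		}
	}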
@@ -2364,11 +2389,17 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
			predicates.add(criteriaBuilder.equal(systemJoin.get("myCodeSystemUri"), theCodeSystemUrl));
		}
+		// for the loinc CodeSystem the last version is not necessarily the current one anymore, so if no
+		// version is present we need to query for the current one, which is the one whose version is null
		if (isNoneBlank(theCodeSystemVersion)) {
			predicates.add(criteriaBuilder.equal(systemVersionJoin.get("myCodeSystemVersionId"), theCodeSystemVersion));
+		} else {
+			if (theCodeSystemUrl.toLowerCase(Locale.ROOT).contains(LOINC_LOW)) {
+				predicates.add(criteriaBuilder.isNull(systemVersionJoin.get("myCodeSystemVersionId")));
		} else {
			query.orderBy(criteriaBuilder.desc(root.get("myUpdated")));
		}
+		}
		Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
		query.where(outerPredicate);
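For context, a minimal sketch of the version-selection rule this hunk implements, flattened into an else-if chain that is behaviorally equivalent to the nested form above (names mirror the locals in codeSystemValidateCode; the standalone criteria setup is assumed):

	import org.apache.commons.lang3.StringUtils;
	import javax.persistence.criteria.*;
	import java.util.List;
	import java.util.Locale;

	class CurrentVersionPredicateSketch {
		static void addVersionClause(CriteriaBuilder criteriaBuilder, CriteriaQuery<?> query, Root<?> root,
												Join<?, ?> systemVersionJoin, List<Predicate> predicates,
												String theCodeSystemUrl, String theCodeSystemVersion) {
			if (StringUtils.isNoneBlank(theCodeSystemVersion)) {
				// explicit version requested: match it exactly
				predicates.add(criteriaBuilder.equal(systemVersionJoin.get("myCodeSystemVersionId"), theCodeSystemVersion));
			} else if (theCodeSystemUrl.toLowerCase(Locale.ROOT).contains("loinc")) {
				// no version + LOINC: the current version is the one stored with a null version id
				predicates.add(criteriaBuilder.isNull(systemVersionJoin.get("myCodeSystemVersionId")));
			} else {
				// no version + any other CodeSystem: the newest upload wins
				query.orderBy(criteriaBuilder.desc(root.get("myUpdated")));
			}
		}
	}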
@@ -2492,4 +2523,23 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
		return theReqLang.equalsIgnoreCase(theStoredLang);
	}
	@Override
	public Optional<IBaseResource> readCodeSystemByForcedId(String theForcedId) {
		@SuppressWarnings("unchecked")
		List<ResourceTable> resultList = (List<ResourceTable>) myEntityManager.createQuery(
			"select f.myResource from ForcedId f " +
			"where f.myResourceType = 'CodeSystem' and f.myForcedId = '" + theForcedId + "'").getResultList();
		if (resultList.isEmpty()) return Optional.empty();
		if (resultList.size() > 1) throw new NonUniqueResultException(
			"More than one CodeSystem is pointed by forcedId: " + theForcedId + ". Was constraint "
				+ ForcedId.IDX_FORCEDID_TYPE_FID + " removed?");
		IFhirResourceDao<CodeSystem> csDao = myDaoRegistry.getResourceDao("CodeSystem");
		IBaseResource cs = csDao.toResource(resultList.get(0), false);
		return Optional.of(cs);
	}
}
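An aside on the JPQL above: it builds the query by concatenating theForcedId into the string. A hedged sketch of an equivalent parameterized form, using only the standard JPA TypedQuery API (same entity and fields as the method above; not part of the commit):

	import ca.uhn.fhir.jpa.model.entity.ResourceTable;
	import javax.persistence.EntityManager;
	import java.util.List;

	class ForcedIdLookupSketch {
		static List<ResourceTable> findCodeSystemResource(EntityManager myEntityManager, String theForcedId) {
			// Bind parameter instead of string concatenation; otherwise identical semantics
			return myEntityManager.createQuery(
					"select f.myResource from ForcedId f " +
						"where f.myResourceType = 'CodeSystem' and f.myForcedId = :forcedId",
					ResourceTable.class)
				.setParameter("forcedId", theForcedId)
				.getResultList();
		}
	}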
@@ -86,7 +86,9 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
+import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
	private static final Logger ourLog = LoggerFactory.getLogger(TermCodeSystemStorageSvcImpl.class);
@@ -139,6 +141,9 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
			CodeSystem codeSystemResource = new CodeSystem();
			codeSystemResource.setUrl(theSystem);
			codeSystemResource.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
+			if (isBlank(codeSystemResource.getIdElement().getIdPart()) && theSystem.contains(LOINC_LOW)) {
+				codeSystemResource.setId(LOINC_LOW);
+			}
			myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(codeSystemResource);
			cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
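A hedged example of the effect of this hunk: when the NOTPRESENT stub CodeSystem for a LOINC URL is created without an id, it now receives a fixed client-assigned id, presumably so that repeated loads address one well-known resource. The sketch assumes LOINC_LOW is the string "loinc", consistent with its use as a resource id elsewhere in this commit:

	import org.hl7.fhir.r4.model.CodeSystem;

	class LoincStubSketch {
		// Illustrative only: the stub resource this branch produces for theSystem = "http://loinc.org"
		static CodeSystem loincStub() {
			CodeSystem codeSystemResource = new CodeSystem();
			codeSystemResource.setUrl("http://loinc.org");
			codeSystemResource.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
			codeSystemResource.setId("loinc"); // LOINC_LOW: fixed id instead of a server-assigned one
			return codeSystemResource;
		}
	}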
@@ -470,10 +475,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
			codeSystemToStore = myCodeSystemVersionDao.saveAndFlush(codeSystemToStore);
		}
-		// defaults to true
-		boolean isMakeVersionCurrent = theRequestDetails == null ||
-			(boolean) theRequestDetails.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE);
+		boolean isMakeVersionCurrent = ITermCodeSystemStorageSvc.isMakeVersionCurrent(theRequestDetails);
		if (isMakeVersionCurrent) {
			codeSystem.setCurrentVersion(codeSystemToStore);
			if (codeSystem.getPid() == null) {
@@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.term;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.StringUtils;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL_PLUS_SLASH;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
public class TermReadSvcUtil {
public static boolean mustReturnEmptyValueSet(String theUrl) {
if (! theUrl.startsWith(LOINC_GENERIC_VALUESET_URL)) return true;
if (! theUrl.startsWith(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH)) {
throw new InternalErrorException("Don't know how to extract ValueSet's ForcedId from url: " + theUrl);
}
String forcedId = theUrl.substring(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH.length());
return StringUtils.isBlank(forcedId);
}
public static boolean isLoincNotGenericUnversionedValueSet(String theUrl) {
boolean isLoincCodeSystem = StringUtils.containsIgnoreCase(theUrl, LOINC_LOW);
boolean isNoVersion = ! theUrl.contains("|");
boolean isNotLoincGenericValueSet = ! theUrl.equals(LOINC_GENERIC_VALUESET_URL);
return isLoincCodeSystem && isNoVersion && isNotLoincGenericValueSet;
}
public static boolean isLoincNotGenericUnversionedCodeSystem(String theUrl) {
boolean isLoincCodeSystem = StringUtils.containsIgnoreCase(theUrl, LOINC_LOW);
boolean isNoVersion = ! theUrl.contains("|");
return isLoincCodeSystem && isNoVersion;
}
}
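A few worked examples of these helpers (sketch only; the expected values assume LOINC_GENERIC_VALUESET_URL is "http://loinc.org/vs" and LOINC_GENERIC_VALUESET_URL_PLUS_SLASH its slash-terminated form, an assumption based on the constant names and the http://loinc.org/vs/... URLs used in the tests of this commit):

	import ca.uhn.fhir.jpa.term.TermReadSvcUtil;

	class TermReadSvcUtilSketch {
		static void demo() {
			TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org/vs/LL1000-0"); // true: LOINC, no "|version", not the generic VS
			TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org/vs|2.68");     // false: a version is present
			TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org/vs");          // false: the generic VS itself
			TermReadSvcUtil.mustReturnEmptyValueSet("http://loinc.org/vs/LL1000-0");              // false: ForcedId "LL1000-0" can be extracted
			TermReadSvcUtil.mustReturnEmptyValueSet("http://loinc.org/vs/");                      // true: blank ForcedId
		}
	}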
@@ -33,7 +33,10 @@ import org.springframework.context.ApplicationContext;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.EventListener;
+import java.security.InvalidParameterException;
import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
public class TermVersionAdapterSvcR4 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
	private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
@@ -63,6 +66,9 @@ public class TermVersionAdapterSvcR4 extends BaseTermVersionAdapterSvcImpl imple
	public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) {
		validateCodeSystemForStorage(theCodeSystemResource);
		if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
+			if (theCodeSystemResource.getUrl().contains(LOINC_LOW)) {
+				throw new InvalidParameterException("'loinc' CodeSystem must have an 'ID' element");
+			}
			String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
			return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl, theRequestDetails).getId();
		} else {
@@ -40,7 +40,16 @@ import java.util.List;
 */
public interface ITermCodeSystemStorageSvc {
-	static final String MAKE_LOADING_VERSION_CURRENT = "make.loading.version.current";
+	String MAKE_LOADING_VERSION_CURRENT = "make.loading.version.current";
+
+	/**
+	 * Defaults to true when theRequestDetails is null or no entry is present in its user data
+	 */
+	static boolean isMakeVersionCurrent(RequestDetails theRequestDetails) {
+		return theRequestDetails == null ||
+			(boolean) theRequestDetails.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE);
+	}
	void deleteCodeSystem(TermCodeSystem theCodeSystem);
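A hedged sketch of how a caller could opt out of making an uploaded version the current one (the ServletRequestDetails setup mirrors the tests in this commit; the key and the true-by-default behavior come from the interface above; the ITermCodeSystemStorageSvc import path is assumed to match its sibling ITermReadSvc):

	import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
	import ca.uhn.fhir.rest.api.server.RequestDetails;
	import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

	class MakeVersionCurrentSketch {
		static void demo() {
			RequestDetails requestDetails = new ServletRequestDetails();
			// Explicit opt-out: this upload will not become the current version
			requestDetails.getUserData().put(ITermCodeSystemStorageSvc.MAKE_LOADING_VERSION_CURRENT, Boolean.FALSE);
			boolean makeCurrent = ITermCodeSystemStorageSvc.isMakeVersionCurrent(requestDetails); // false
			boolean defaulted = ITermCodeSystemStorageSvc.isMakeVersionCurrent(null);             // true: null defaults to true
		}
	}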
@@ -4,10 +4,8 @@ import ca.uhn.fhir.context.support.ConceptValidationOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
-import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import ca.uhn.fhir.jpa.entity.TermConcept;
-import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
-import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
+import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator;
import ca.uhn.fhir.util.FhirVersionIndependentConcept;
@@ -17,7 +15,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.CodeSystem;
-import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import javax.annotation.Nonnull;
@@ -119,4 +116,14 @@ public interface ITermReadSvc extends IValidationSupport {
	 */
	CodeValidationResult codeSystemValidateCode(IIdType theCodeSystemId, String theValueSetUrl, String theVersion, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept);
+	/**
+	 * Resolves the current TermValueSet for a URL, in a version-independent way
+	 */
+	Optional<TermValueSet> findCurrentTermValueSet(String theUrl);
+	/**
+	 * Reads the current CodeSystem resource for a ForcedId, in a version-independent way
+	 */
+	Optional<IBaseResource> readCodeSystemByForcedId(String theForcedId);
}
@@ -33,6 +33,7 @@ public enum LoincUploadPropertiesEnum {
	 */
	LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"),
+	LOINC_XML_FILE("loinc.xml"),
	/*
	 * MANDATORY
@@ -0,0 +1,124 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.github.benmanes.caffeine.cache.Cache;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.test.util.ReflectionTestUtils;
import java.util.function.Function;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ExtendWith(MockitoExtension.class)
class JpaPersistedResourceValidationSupportTest {
private FhirContext theFhirContext = FhirContext.forR4();
@Mock private ITermReadSvc myTermReadSvc;
@Mock private DaoRegistry myDaoRegistry;
@Mock private Cache<String, IBaseResource> myLoadCache;
@Mock private IFhirResourceDao<ValueSet> myValueSetResourceDao;
@InjectMocks
private IValidationSupport testedClass =
new JpaPersistedResourceValidationSupport(theFhirContext);
private Class<? extends IBaseResource> myCodeSystemType = CodeSystem.class;
private Class<? extends IBaseResource> myValueSetType = ValueSet.class;
@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myValueSetType", myValueSetType);
}
@Nested
public class FetchCodeSystemTests {
@Test
void fetchCodeSystemMustUseForcedId() {
testedClass.fetchCodeSystem("string-containing-loinc");
verify(myTermReadSvc, times(1)).readCodeSystemByForcedId(LOINC_LOW);
verify(myLoadCache, never()).get(anyString(), isA(Function.class));
}
@Test
void fetchCodeSystemMustNotUseForcedId() {
testedClass.fetchCodeSystem("string-not-containing-l-o-i-n-c");
verify(myTermReadSvc, never()).readCodeSystemByForcedId(LOINC_LOW);
verify(myLoadCache, times(1)).get(anyString(), isA(Function.class));
}
}
@Nested
public class FetchValueSetTests {
@Test
void fetchValueSetMustUseForcedId() {
final String valueSetId = "string-containing-loinc";
ResourceNotFoundException thrown = assertThrows(
ResourceNotFoundException.class,
() -> testedClass.fetchValueSet(valueSetId));
assertTrue(thrown.getMessage().contains("Unable to find current version of ValueSet for url: " + valueSetId));
}
@Test
void fetchValueSetMustNotUseForcedId() {
testedClass.fetchValueSet("string-not-containing-l-o-i-n-c");
verify(myLoadCache, times(1)).get(anyString(), isA(Function.class));
}
}
}
@@ -144,7 +144,7 @@ public class TransactionProcessorTest {
	}
	@Bean
-	public BaseTransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> versionAdapter() {
+	public ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> versionAdapter() {
		return new TransactionProcessorVersionAdapterR4();
	}
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao.dstu2;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3Test;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@@ -416,15 +416,15 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
		 * If any of this ever fails, it means that one of the OperationOutcome issue severity codes has changed code value across versions. We store the string as a constant, so something will need to
		 * be fixed.
		 */
-		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
-		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
-		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.ERROR.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
-		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
-		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
-		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.INFORMATION.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
-		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
-		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
-		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.WARNING.getCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
+		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
+		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
+		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.ERROR.getCode(), BaseStorageDao.OO_SEVERITY_ERROR);
+		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
+		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
+		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.INFORMATION.getCode(), BaseStorageDao.OO_SEVERITY_INFO);
+		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
+		assertEquals(org.hl7.fhir.dstu3.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
+		assertEquals(ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum.WARNING.getCode(), BaseStorageDao.OO_SEVERITY_WARN);
	}
	@Test
@@ -7,6 +7,8 @@ import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -15,9 +17,11 @@ import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r5.model.StructureDefinition;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.mockito.stubbing.Answer;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.beans.factory.annotation.Autowired;
@@ -27,6 +31,9 @@ import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.ArgumentMatchers.nullable;
+import static org.mockito.Mockito.when;
class DeleteExpungeDaoTest extends BaseJpaR4Test {
	@Autowired
@@ -51,6 +58,41 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
		myDaoConfig.setExpungeBatchSize(defaultDaoConfig.getExpungeBatchSize());
	}
@Test
public void testDeleteCascadeExpungeReturns400() {
// Create new organization
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
Patient patient = new Patient();
patient.setManagingOrganization(new Reference(organizationId));
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
// Try to delete _cascade and _expunge on the organization
BaseServerResponseException e = assertThrows(BaseServerResponseException.class, () -> {
myOrganizationDao
.deleteByUrl("Organization?" + "_cascade=delete&" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
});
		// Expect HTTP 400 Bad Request, since this combination is not implemented
assertEquals(Constants.STATUS_HTTP_400_BAD_REQUEST, e.getStatusCode());
assertEquals("_expunge cannot be used with _cascade", e.getMessage());
// Try to delete with header 'X-Cascade' = delete
when(mySrd.getHeader(Constants.HEADER_CASCADE)).thenReturn(Constants.CASCADE_DELETE);
e = assertThrows(BaseServerResponseException.class, () -> {
myOrganizationDao
.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
});
		// Expect HTTP 400 Bad Request, since this combination is not implemented
assertEquals(Constants.STATUS_HTTP_400_BAD_REQUEST, e.getStatusCode());
assertEquals("_expunge cannot be used with _cascade", e.getMessage());
}
	@Test
	public void testDeleteExpungeThrowExceptionIfForeignKeyLinksExists() {
		// setup
@@ -5,7 +5,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
@@ -1014,13 +1014,13 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
		 * be fixed.
		 */
		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirDao.OO_SEVERITY_ERROR);
-		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_ERROR);
+		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseStorageDao.OO_SEVERITY_ERROR);
		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.ERROR.toCode(), BaseHapiFhirDao.OO_SEVERITY_ERROR);
		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirDao.OO_SEVERITY_INFO);
-		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_INFO);
+		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseStorageDao.OO_SEVERITY_INFO);
		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.INFORMATION.toCode(), BaseHapiFhirDao.OO_SEVERITY_INFO);
		assertEquals(org.hl7.fhir.dstu2.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirDao.OO_SEVERITY_WARN);
-		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirResourceDao.OO_SEVERITY_WARN);
+		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseStorageDao.OO_SEVERITY_WARN);
		assertEquals(org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity.WARNING.toCode(), BaseHapiFhirDao.OO_SEVERITY_WARN);
	}
@@ -81,6 +81,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
+import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.fail;
@@ -501,7 +502,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		cs.setUrl("http://loinc.org");
		cs.addConcept().setCode("123-4").setDisplay("Code 123 4");
-		myCodeSystemDao.create(cs);
+		cs.setId(LOINC_LOW);
+		myCodeSystemDao.update(cs);
		Group group = new Group();
		group.setId("ABC");
@@ -568,7 +570,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		cs.setUrl("http://loinc.org");
		cs.addConcept().setCode("123-4").setDisplay("Code 123 4");
-		myCodeSystemDao.create(cs);
+		cs.setId(LOINC_LOW);
+		myCodeSystemDao.update(cs);
		Group group = new Group();
		group.setId("ABC");
@@ -635,7 +638,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		cs.setUrl("http://loinc.org");
		cs.addConcept().setCode("123-4").setDisplay("Code 123 4");
-		myCodeSystemDao.create(cs);
+		cs.setId(LOINC_LOW);
+		myCodeSystemDao.update(cs);
		Group group = new Group();
		group.setId("ABC");
@@ -1710,7 +1714,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
		cs.setUrl(ITermLoaderSvc.LOINC_URI);
		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		cs.addConcept().setCode("10013-1");
-		myCodeSystemDao.create(cs);
+		cs.setId(LOINC_LOW);
+		myCodeSystemDao.update(cs);
		IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(new UriType("http://fooVs"), null, new StringType("10013-1"), new StringType(ITermLoaderSvc.LOINC_URI), null, null, null, mySrd);
@@ -141,7 +141,6 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
-import org.springframework.data.domain.Pageable;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.util.AopTestUtils;
@@ -158,7 +157,7 @@ import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
@@ -569,9 +568,9 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
	public List<String> getExpandedConceptsByValueSetUrl(String theValuesetUrl) {
		return runInTransaction(() -> {
-			List<TermValueSet> valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl);
-			assertEquals(1, valueSets.size());
-			TermValueSet valueSet = valueSets.get(0);
+			Optional<TermValueSet> valueSetOpt = myTermSvc.findCurrentTermValueSet(theValuesetUrl);
+			assertTrue(valueSetOpt.isPresent());
+			TermValueSet valueSet = valueSetOpt.get();
			List<TermValueSetConcept> concepts = valueSet.getConcepts();
			return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList());
		});
@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.interceptor.validation;
import ca.uhn.fhir.context.FhirVersionEnum;
-import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider;
import ca.uhn.fhir.rest.api.MethodOutcome;
@@ -76,7 +75,7 @@ public class RepositoryValidatingInterceptorHttpR4Test extends BaseJpaR4Test {
	}
	private RepositoryValidatingRuleBuilder newRuleBuilder() {
-		return myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
+		return myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
	}
}
@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.interceptor.validation;
-import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@@ -388,7 +387,7 @@ public class RepositoryValidatingInterceptorR4Test extends BaseJpaR4Test {
	private RepositoryValidatingRuleBuilder newRuleBuilder() {
-		return myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
+		return myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class);
	}
}
@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.interceptor.validation;
import ca.uhn.fhir.context.support.IValidationSupport;
-import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@@ -23,7 +22,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import java.io.IOException;
-import java.util.Collections;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -118,7 +116,7 @@ public class ValidationMessageSuppressingInterceptorTest extends BaseResourcePro
	public void testRepositoryValidation() {
		createPatient(withActiveTrue(), withId("A"));
-		List<IRepositoryValidatingRule> rules = myApplicationContext.getBean(BaseConfig.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class)
+		List<IRepositoryValidatingRule> rules = myApplicationContext.getBean(RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER, RepositoryValidatingRuleBuilder.class)
			.forResourcesOfType("Encounter")
			.requireValidationToDeclaredProfiles().withBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Ignore)
			.build();
@@ -1,35 +1,5 @@
package ca.uhn.fhir.jpa.provider.r4;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.hl7.fhir.r4.model.Bundle;
-import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
-import org.hl7.fhir.r4.model.Parameters;
-import org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent;
-import org.hl7.fhir.r4.model.Patient;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.context.ContextLoader;
-import org.springframework.web.context.WebApplicationContext;
-import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
-import org.springframework.web.context.support.GenericWebApplicationContext;
-import org.springframework.web.context.support.WebApplicationContextUtils;
-import org.springframework.web.cors.CorsConfiguration;
-import org.springframework.web.servlet.DispatcherServlet;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
@@ -56,6 +26,35 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.rest.server.provider.ReindexProvider;
import ca.uhn.fhir.test.utilities.JettyUtil;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.context.ContextLoader;
+import org.springframework.web.context.WebApplicationContext;
+import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
+import org.springframework.web.context.support.GenericWebApplicationContext;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+import org.springframework.web.cors.CorsConfiguration;
+import org.springframework.web.servlet.DispatcherServlet;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
@@ -0,0 +1,78 @@
package ca.uhn.fhir.jpa.term;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.r4.model.CodeSystem;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.test.util.ReflectionTestUtils;
import java.security.InvalidParameterException;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class TermVersionAdapterSvcR4Test {
private final TermVersionAdapterSvcR4 testedClass = new TermVersionAdapterSvcR4();
@Mock private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
@Mock ServletRequestDetails theRequestDetails;
@Mock DaoMethodOutcome theDaoMethodOutcome;
@Test
void createOrUpdateCodeSystemMustHaveId() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("a-loinc-system");
InvalidParameterException thrown = assertThrows(
InvalidParameterException.class,
() -> testedClass.createOrUpdateCodeSystem(codeSystem, new ServletRequestDetails()));
assertTrue(thrown.getMessage().contains("'loinc' CodeSystem must have an 'ID' element"));
}
@Test
void createOrUpdateCodeSystemWithIdNoException() {
ReflectionTestUtils.setField(testedClass, "myCodeSystemResourceDao", myCodeSystemResourceDao);
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("a-loinc-system").setId(LOINC_LOW);
when(myCodeSystemResourceDao.update(codeSystem, theRequestDetails)).thenReturn(theDaoMethodOutcome);
testedClass.createOrUpdateCodeSystem(codeSystem, theRequestDetails);
verify(myCodeSystemResourceDao, Mockito.times(1)).update(codeSystem, theRequestDetails);
}
}
@@ -25,7 +25,6 @@ import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
-import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
@@ -75,6 +74,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
@@ -930,7 +930,7 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {
		testProps.put(LOINC_CODESYSTEM_MAKE_CURRENT.getCode(), "false");
		doReturn(mockFileDescriptors).when(testedSvc).getLoadedFileDescriptors(mockFileDescriptorList);
-		InvalidRequestException thrown = Assertions.assertThrows(InvalidRequestException.class,
+		InvalidRequestException thrown = assertThrows(InvalidRequestException.class,
			() -> testedSvc.loadLoinc(mockFileDescriptorList, mySrd) );
		assertEquals("'" + LOINC_CODESYSTEM_VERSION.getCode() + "' property is required when '" +
@@ -0,0 +1,702 @@
package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Answers;
import org.mockito.Mock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.util.ResourceUtils;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_MAKE_CURRENT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_VERSION;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_PROPERTIES_FILE;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_XML_FILE;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.when;
/**
 * These tests load and validate both a CodeSystem and its ValueSets, so test names such as uploadFirstCurrent... are shorthand for uploadCodeSystemAndValueSetCurrent...
 */
public class TerminologySvcImplCurrentVersionR4Test extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(TerminologySvcImplCurrentVersionR4Test.class);
public static final String BASE_LOINC_URL = "http://loinc.org";
public static final String BASE_LOINC_VS_URL = BASE_LOINC_URL + "/vs/";
// some ValueSets have a version specified independent of the CS version being uploaded. This one doesn't
public static final String VS_NO_VERSIONED_ON_UPLOAD_ID = "loinc-rsna-radiology-playbook";
public static final String VS_NO_VERSIONED_ON_UPLOAD = BASE_LOINC_VS_URL + VS_NO_VERSIONED_ON_UPLOAD_ID;
public static final String VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE = "17787-3";
public static final String VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY = "NM Thyroid gland Study report";
// some ValueSets have a version specified independent of the CS version being uploaded. This is one of them
public static final String VS_VERSIONED_ON_UPLOAD_ID = "LL1000-0";
public static final String VS_VERSIONED_ON_UPLOAD = BASE_LOINC_VS_URL + VS_VERSIONED_ON_UPLOAD_ID;
public static final String VS_VERSIONED_ON_UPLOAD_FIRST_CODE = "LA13825-7";
public static final String VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY = "1 slice or 1 dinner roll";
public static final String VS_ANSWER_LIST_VERSION = "Beta.1";
public static final Set<String> possibleVersions = Sets.newHashSet("2.67", "2.68", "2.69");
@Mock
private HttpServletResponse mockServletResponse;
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private ServletRequestDetails mockRequestDetails;
@Autowired
private EntityManager myEntityManager;
@Autowired
private TermLoaderSvcImpl myTermLoaderSvc;
@Autowired
private ITermReadSvc myITermReadSvc;
@Autowired
@Qualifier(BaseConfig.JPA_VALIDATION_SUPPORT)
private IValidationSupport myJpaPersistedResourceValidationSupport;
private ZipCollectionBuilder myFiles;
private ServletRequestDetails myRequestDetails = new ServletRequestDetails();
private Properties uploadProperties;
private IFhirResourceDao<ValueSet> myValueSetIFhirResourceDao;
@BeforeEach
public void beforeEach() throws Exception {
File file = ResourceUtils.getFile("classpath:loinc-ver/" + LOINC_UPLOAD_PROPERTIES_FILE.getCode());
uploadProperties = new Properties();
uploadProperties.load(new FileInputStream(file));
myValueSetIFhirResourceDao = myDaoRegistry.getResourceDao(ValueSet.class);
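// searches below page their results through the request's server, so the deep-stubbed request needs a default page size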
when(mockRequestDetails.getServer().getDefaultPageSize()).thenReturn(25);
}
/**
 * Validates the search, expand, lookup and validateCode operations for the input version,
 * or for the current version when the input version is null
 */
private void validateOperations(String currentVersion, Collection<String> theExpectedVersions) {
validateValueSetSearch(theExpectedVersions);
validateValueExpand(currentVersion, theExpectedVersions);
validateValueLookup(currentVersion, theExpectedVersions);
validateValidateCode(currentVersion, theExpectedVersions);
// nothing to test for subsumes operation as it works only for concepts which share CodeSystem and version
}
private void validateValidateCode(String theCurrentVersion, Collection<String> allVersions) {
IValidationSupport.CodeValidationResult resultNoVersioned = myCodeSystemDao.validateCode(null,
new UriType(BASE_LOINC_URL), null, new CodeType(VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE),
null, null, null, null);
assertNotNull(resultNoVersioned);
assertEquals(prefixWithVersion(theCurrentVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), resultNoVersioned.getDisplay());
IValidationSupport.CodeValidationResult resultVersioned = myCodeSystemDao.validateCode(null,
new UriType(BASE_LOINC_URL), null, new CodeType(VS_VERSIONED_ON_UPLOAD_FIRST_CODE),
null, null, null, null);
assertNotNull(resultVersioned);
assertEquals(prefixWithVersion(theCurrentVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), resultVersioned.getDisplay());
allVersions.forEach(this::validateValidateCodeForVersion);
}
private void validateValidateCodeForVersion(String theVersion) {
IValidationSupport.CodeValidationResult resultNoVersioned = myCodeSystemDao.validateCode(null,
new UriType(BASE_LOINC_URL), new StringType(theVersion), new CodeType(VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE),
null, null, null, null);
assertNotNull(resultNoVersioned);
assertEquals(prefixWithVersion(theVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), resultNoVersioned.getDisplay());
IValidationSupport.CodeValidationResult resultVersioned = myCodeSystemDao.validateCode(null,
new UriType(BASE_LOINC_URL), new StringType(theVersion), new CodeType(VS_VERSIONED_ON_UPLOAD_FIRST_CODE),
null, null, null, null);
assertNotNull(resultVersioned);
assertEquals(prefixWithVersion(theVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), resultVersioned.getDisplay());
}
private void validateValueLookup(String theCurrentVersion, Collection<String> allVersions) {
IValidationSupport.LookupCodeResult resultNoVer = myValidationSupport.lookupCode(
new ValidationSupportContext(myValidationSupport), BASE_LOINC_URL, VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE, null);
assertNotNull(resultNoVer);
String expectedNoVer = prefixWithVersion(theCurrentVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY);
assertEquals(expectedNoVer, resultNoVer.getCodeDisplay());
IValidationSupport.LookupCodeResult resultWithVer = myValidationSupport.lookupCode(
new ValidationSupportContext(myValidationSupport), BASE_LOINC_URL, VS_VERSIONED_ON_UPLOAD_FIRST_CODE, null);
assertNotNull(resultWithVer);
String expectedWithVer = prefixWithVersion(theCurrentVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY);
assertEquals(expectedWithVer, resultWithVer.getCodeDisplay());
allVersions.forEach(this::lookupForVersion);
}
private void lookupForVersion(String theVersion) {
IValidationSupport.LookupCodeResult resultNoVer = myValidationSupport.lookupCode(
new ValidationSupportContext(myValidationSupport), BASE_LOINC_URL + "|" + theVersion,
VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE, null);
assertNotNull(resultNoVer);
String expectedNoVer = prefixWithVersion(theVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY);
assertEquals(expectedNoVer, resultNoVer.getCodeDisplay());
IValidationSupport.LookupCodeResult resultWithVer = myValidationSupport.lookupCode(
new ValidationSupportContext(myValidationSupport), BASE_LOINC_URL + "|" + theVersion,
VS_VERSIONED_ON_UPLOAD_FIRST_CODE, null);
assertNotNull(resultWithVer);
String expectedWithVer = prefixWithVersion(theVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY);
assertEquals(expectedWithVer, resultWithVer.getCodeDisplay());
}
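// e.g. prefixWithVersion("2.68", "NM Thyroid gland Study report") returns "v2.68 NM Thyroid gland Study report"; a null version returns the suffix unchanged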
private String prefixWithVersion(String version, String suffix) {
return (version == null ? "" : "v" + version + " ") + suffix;
}
private void validateValueExpand(String currentVersion, Collection<String> theAllVersions) {
// for CS ver = null, VS ver = null
ValueSet vs = myValueSetDao.expandByIdentifier(VS_NO_VERSIONED_ON_UPLOAD, null);
assertEquals(1, vs.getExpansion().getContains().size());
// the version was prefixed to the code display on upload, so it can be validated here
assertEquals(prefixWithVersion(currentVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
vs.getExpansion().getContains().iterator().next().getDisplay());
// for CS ver = null, VS ver != null
ValueSet vs1 = myValueSetDao.expandByIdentifier(
VS_VERSIONED_ON_UPLOAD + "|" + VS_ANSWER_LIST_VERSION, null);
assertEquals(3, vs1.getExpansion().getContains().size());
assertEquals(prefixWithVersion(currentVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
vs1.getExpansion().getContains().iterator().next().getDisplay());
validateExpandedTermConcepts(currentVersion, theAllVersions);
// now for each uploaded version
theAllVersions.forEach(this::validateValueExpandForVersion);
}
private void validateExpandedTermConcepts(String theCurrentVersion, Collection<String> theAllVersions) {
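// four combinations are checked: each of the two codes (from the unversioned and the versioned ValueSet) against both the current CS pointer (null version id) and the explicit CS version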
TermConcept termConceptNoVerCsvNoVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId is null").getSingleResult();
assertNotNull(termConceptNoVerCsvNoVer);
// data should have version because it was loaded with a version
assertEquals(prefixWithVersion(theCurrentVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptNoVerCsvNoVer.getDisplay());
TermConcept termConceptVerCsvNoVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId is null").getSingleResult();
assertNotNull(termConceptVerCsvNoVer);
// data should have version because it was loaded with a version
assertEquals(prefixWithVersion(theCurrentVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptVerCsvNoVer.getDisplay());
if (theCurrentVersion != null) {
TermConcept termConceptNoVerCsvVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId = '" + theCurrentVersion + "'").getSingleResult();
assertNotNull(termConceptNoVerCsvVer);
// data should have version because it was loaded with a version
assertEquals(prefixWithVersion(theCurrentVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptNoVerCsvVer.getDisplay());
TermConcept termConceptVerCsvVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId = '" + theCurrentVersion + "'").getSingleResult();
assertNotNull(termConceptVerCsvVer);
// data should have version because it was loaded with a version
assertEquals(prefixWithVersion(theCurrentVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptVerCsvVer.getDisplay());
}
theAllVersions.forEach(this::validateExpandedTermConceptsForVersion);
}
private void validateExpandedTermConceptsForVersion(String theVersion) {
TermConcept termConceptNoVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId = '" + theVersion + "'").getSingleResult();
assertNotNull(termConceptNoVer);
assertEquals(prefixWithVersion(theVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptNoVer.getDisplay());
TermConcept termConceptVer = (TermConcept) myEntityManager.createQuery(
"select tc from TermConcept tc join fetch tc.myCodeSystem tcsv where tc.myCode = '" +
VS_VERSIONED_ON_UPLOAD_FIRST_CODE + "' and tcsv.myCodeSystemVersionId = '" + theVersion + "'").getSingleResult();
assertNotNull(termConceptVer);
assertEquals(prefixWithVersion(theVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY), termConceptVer.getDisplay());
}
private void validateValueExpandForVersion(String theVersion) {
// for CS ver != null, VS ver = null
ValueSet vs2 = myValueSetDao.expandByIdentifier(
VS_NO_VERSIONED_ON_UPLOAD + "|" + theVersion, null);
assertEquals(1, vs2.getExpansion().getContains().size());
// the version was prefixed to the code display on upload, so it can be validated here
assertEquals(prefixWithVersion(theVersion, VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
vs2.getExpansion().getContains().iterator().next().getDisplay());
// for CS ver != null, VS ver != null
ValueSet vs3 = myValueSetDao.expandByIdentifier(
VS_VERSIONED_ON_UPLOAD + "|" + VS_ANSWER_LIST_VERSION + "-" + theVersion, null);
assertEquals(3, vs3.getExpansion().getContains().size());
// the version was prefixed to the code display on upload, so it can be validated here
assertEquals(prefixWithVersion(theVersion, VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
vs3.getExpansion().getContains().iterator().next().getDisplay());
}
private void validateValueSetSearch(Collection<String> theExpectedIdVersions) {
// first validate search for CS ver = null VS ver = null
SearchParameterMap paramsNoUploadVer = new SearchParameterMap("url", new UriParam(VS_NO_VERSIONED_ON_UPLOAD));
int expectedResultQty = theExpectedIdVersions.size() + 1; // + 1 because an extra null version (the current) is always present
IBundleProvider noUploadVerResult = myValueSetIFhirResourceDao.search(paramsNoUploadVer, mockRequestDetails, mockServletResponse);
List<IBaseResource> noUploadVerValueSets = noUploadVerResult.getAllResources();
assertEquals(expectedResultQty, noUploadVerValueSets.size());
matchUnqualifiedIds(noUploadVerValueSets, theExpectedIdVersions);
// now validate search for CS ver = null VS ver != null
SearchParameterMap paramsUploadVer = new SearchParameterMap("url", new UriParam(VS_VERSIONED_ON_UPLOAD));
paramsUploadVer.add("version", new TokenParam(VS_ANSWER_LIST_VERSION));
IBundleProvider uploadVerResult = myValueSetIFhirResourceDao.search(paramsUploadVer, mockRequestDetails, mockServletResponse);
List<IBaseResource> uploadVerValueSets = uploadVerResult.getAllResources();
assertEquals(1, uploadVerValueSets.size());
assertEquals(VS_VERSIONED_ON_UPLOAD_ID, uploadVerValueSets.get(0).getIdElement().getIdPart());
assertEquals(VS_ANSWER_LIST_VERSION, ((ValueSet) uploadVerValueSets.get(0)).getVersion());
// now validate each specific uploaded version
theExpectedIdVersions.forEach(this::validateValueSetSearchForVersion);
}
/**
 * Some ValueSets (i.e. AnswerLists) can have a specific version, different from the version of the
 * CodeSystem with which they were uploaded. That ValueSet version is what both sets of assertions
 * here distinguish, not the CodeSystem version.
 */
private void validateValueSetSearchForVersion(String theVersion) {
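// e.g. when CS version 2.68 is uploaded, the versioned copy of AnswerList LL1000-0 is stored with id "LL1000-0-2.68" and version "Beta.1-2.68"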
// for no versioned VS (VS version, different than CS version)
SearchParameterMap paramsUploadNoVer = new SearchParameterMap("url", new UriParam(VS_NO_VERSIONED_ON_UPLOAD));
paramsUploadNoVer.add("version", new TokenParam(theVersion));
IBundleProvider uploadNoVerResult = myValueSetIFhirResourceDao.search(paramsUploadNoVer, mockRequestDetails, mockServletResponse);
List<IBaseResource> uploadNoVerValueSets = uploadNoVerResult.getAllResources();
assertEquals(1, uploadNoVerValueSets.size());
ValueSet loadNoVersionValueSet = (ValueSet) uploadNoVerValueSets.get(0);
String expectedLoadNoVersionUnqualifiedId = VS_NO_VERSIONED_ON_UPLOAD_ID + (theVersion == null ? "" : "-" + theVersion);
assertEquals(expectedLoadNoVersionUnqualifiedId, loadNoVersionValueSet.getIdElement().getIdPart());
// versioned VS (VS version, different than CS version)
SearchParameterMap paramsUploadVer = new SearchParameterMap("url", new UriParam(VS_VERSIONED_ON_UPLOAD));
paramsUploadVer.add("version", new TokenParam(VS_ANSWER_LIST_VERSION + "-" + theVersion));
IBundleProvider uploadVerResult = myValueSetIFhirResourceDao.search(paramsUploadVer, mockRequestDetails, mockServletResponse);
List<IBaseResource> uploadVerValueSets = uploadVerResult.getAllResources();
assertEquals(1, uploadVerValueSets.size());
ValueSet loadVersionValueSet = (ValueSet) uploadVerValueSets.get(0);
String expectedLoadVersionUnqualifiedId = VS_VERSIONED_ON_UPLOAD_ID + (theVersion == null ? "" : "-" + theVersion);
assertEquals(expectedLoadVersionUnqualifiedId, loadVersionValueSet.getIdElement().getIdPart());
}
/**
 * Validates that the collection of unqualified IDs of the elements of theValueSets matches the
 * expected unqualified IDs corresponding to the uploaded versions, plus one entry with no version
 *
 * @param theValueSets          the ValueSet collection
 * @param theExpectedIdVersions the collection of expected versions
 */
private void matchUnqualifiedIds(List<IBaseResource> theValueSets, Collection<String> theExpectedIdVersions) {
// set should contain one entry per expectedVersion
List<String> expectedNoVersionUnqualifiedIds = theExpectedIdVersions.stream()
.map(expVer -> VS_NO_VERSIONED_ON_UPLOAD_ID + "-" + expVer)
.collect(Collectors.toList());
// plus one entry for null version
expectedNoVersionUnqualifiedIds.add(VS_NO_VERSIONED_ON_UPLOAD_ID);
List<String> resultUnqualifiedIds = theValueSets.stream()
.map(r -> r.getIdElement().getIdPart())
.collect(Collectors.toList());
assertThat(resultUnqualifiedIds, containsInAnyOrder(expectedNoVersionUnqualifiedIds.toArray()));
Set<String> theExpectedIdVersionsPlusNull = Sets.newHashSet(theExpectedIdVersions);
theExpectedIdVersionsPlusNull.add(null);
assertThat(theExpectedIdVersionsPlusNull, containsInAnyOrder(
theValueSets.stream().map(r -> ((ValueSet) r).getVersion()).toArray()));
}
/**
 * Validates that:
 * for CodeSystem:
 *  _ the current CS has no version
 *  _ the current TermCodeSystem has no version
 * for ValueSet:
 *  _ current TermValueSets uploaded with a version keep that upload version, with no CS version appended
 *  _ current TermValueSets uploaded without a version have a null version
 */
private void runCommonValidations(List<String> theAllVersions) {
// for CodeSystem:
// _ current CS is present and has no version
CodeSystem codeSystem = myCodeSystemDao.read(new IdType(LOINC_LOW));
String csString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem);
ourLog.info("CodeSystem:\n" + csString);
HashSet<String> shouldNotBePresentVersions = new HashSet<>(possibleVersions);
theAllVersions.forEach(shouldNotBePresentVersions::remove);
shouldNotBePresentVersions.forEach(vv -> assertFalse(csString.contains(vv),
"Found version string: '" + vv + "' in CodeSystem: " + csString));
// same, reading it from the term service
CodeSystem cs = myITermReadSvc.fetchCanonicalCodeSystemFromCompleteContext(BASE_LOINC_URL);
assertEquals(BASE_LOINC_URL, cs.getUrl());
assertNull(cs.getVersion());
// _ current TermCodeSystem has no version
TermCodeSystemVersion termCSVersion = fetchCurrentCodeSystemVersion();
assertNotNull(termCSVersion);
assertNull(termCSVersion.getCodeSystemVersionId());
// for ValueSet:
// for ValueSet resource
ValueSet vs = (ValueSet) myJpaPersistedResourceValidationSupport.fetchValueSet(VS_NO_VERSIONED_ON_UPLOAD);
assertNotNull(vs);
assertEquals(VS_NO_VERSIONED_ON_UPLOAD, vs.getUrl());
assertNull(vs.getVersion());
// current TermVSs with no upload version have null version
Optional<TermValueSet> noUploadCurrentVsOpt = myITermReadSvc.findCurrentTermValueSet(VS_NO_VERSIONED_ON_UPLOAD);
assertTrue(noUploadCurrentVsOpt.isPresent());
assertNull(noUploadCurrentVsOpt.get().getVersion());
// current VSs with upload version have upload-version with no version append
Optional<TermValueSet> uploadCurrentVsOpt = myITermReadSvc.findCurrentTermValueSet(VS_VERSIONED_ON_UPLOAD);
assertTrue(uploadCurrentVsOpt.isPresent());
assertEquals(VS_ANSWER_LIST_VERSION, uploadCurrentVsOpt.get().getVersion());
}
@Test
public void uploadCurrentNoVersion() throws Exception {
IIdType csId = uploadLoincCodeSystem(null, true);
runCommonValidations(Collections.emptyList());
// validate operation for current (no version parameter)
validateOperations(null, Collections.emptySet());
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList((String) null));
}
@Test
public void uploadWithVersion() throws Exception {
String ver = "2.67";
IIdType csId = uploadLoincCodeSystem(ver, true);
runCommonValidations(Collections.singletonList(ver));
// validate operation for specific version
validateOperations(ver, Collections.singleton(ver));
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList(ver, ver));
}
@Test
public void uploadNoVersionThenNoCurrent() throws Exception {
uploadLoincCodeSystem(null, true);
String ver = "2.67";
uploadLoincCodeSystem(ver, false);
// myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
runCommonValidations(Collections.singletonList(ver));
// validate operation for specific version
validateOperations(null, Collections.singleton(ver));
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList(null, ver));
}
@Test
public void uploadWithVersionThenNoCurrent() throws Exception {
String currentVer = "2.67";
uploadLoincCodeSystem(currentVer, true);
String nonCurrentVer = "2.68";
uploadLoincCodeSystem(nonCurrentVer, false);
// myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
runCommonValidations(Lists.newArrayList(currentVer, nonCurrentVer));
// validate operation for specific version
validateOperations(currentVer, Lists.newArrayList(currentVer, nonCurrentVer));
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList(currentVer, currentVer, nonCurrentVer));
}
/**
 * Validates that TermConcepts were created in the sequence indicated by the parameters
 * and that their displays match the expected versions
 */
private void validateTermConcepts(ArrayList<String> theExpectedVersions) {
@SuppressWarnings("unchecked")
List<TermConcept> termConceptNoVerList = (List<TermConcept>) myEntityManager.createQuery(
"from TermConcept where myCode = '" + VS_NO_VERSIONED_ON_UPLOAD_FIRST_CODE + "' order by myId").getResultList();
assertEquals(theExpectedVersions.size(), termConceptNoVerList.size());
for (int i = 0; i < theExpectedVersions.size(); i++) {
assertEquals(prefixWithVersion(theExpectedVersions.get(i), VS_NO_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
termConceptNoVerList.get(i).getDisplay(), "TermConcept at index " + i + " display");
}
@SuppressWarnings("unchecked")
List<TermConcept> termConceptWithVerList = (List<TermConcept>) myEntityManager.createQuery(
"from TermConcept where myCode = '" + VS_VERSIONED_ON_UPLOAD_FIRST_CODE + "' order by myId").getResultList();
assertEquals(theExpectedVersions.size(), termConceptWithVerList.size());
for (int i = 0; i < theExpectedVersions.size(); i++) {
assertEquals(prefixWithVersion(theExpectedVersions.get(i), VS_VERSIONED_ON_UPLOAD_FIRST_DISPLAY),
termConceptWithVerList.get(i).getDisplay(), "TermConcept at index " + i + " display");
}
}
@Test
public void uploadNoVersionThenNoCurrentThenCurrent() throws Exception {
uploadLoincCodeSystem(null, true);
String nonCurrentVer = "2.67";
uploadLoincCodeSystem(nonCurrentVer, false);
String currentVer = "2.68";
uploadLoincCodeSystem(currentVer, true);
runCommonValidations(Lists.newArrayList(nonCurrentVer, currentVer));
// validate operation for specific version
validateOperations(currentVer, Lists.newArrayList(nonCurrentVer, currentVer));
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList(nonCurrentVer, currentVer, currentVer));
}
@Test
public void uploadWithVersionThenNoCurrentThenCurrent() throws Exception {
String firstCurrentVer = "2.67";
uploadLoincCodeSystem(firstCurrentVer, true);
String noCurrentVer = "2.68";
uploadLoincCodeSystem(noCurrentVer, false);
String lastCurrentVer = "2.69";
uploadLoincCodeSystem(lastCurrentVer, true);
runCommonValidations(Lists.newArrayList(firstCurrentVer, noCurrentVer, lastCurrentVer));
// validate operation for specific version
validateOperations(lastCurrentVer, Lists.newArrayList(firstCurrentVer, noCurrentVer, lastCurrentVer));
// tests conditions which were failing after VS expansion (before fix for issue-2995)
validateTermConcepts(Lists.newArrayList(firstCurrentVer, noCurrentVer, lastCurrentVer, lastCurrentVer));
}
private IIdType uploadLoincCodeSystem(String theVersion, boolean theMakeItCurrent) throws Exception {
myFiles = new ZipCollectionBuilder();
myRequestDetails.getUserData().put(LOINC_CODESYSTEM_MAKE_CURRENT, theMakeItCurrent);
uploadProperties.put(LOINC_CODESYSTEM_MAKE_CURRENT.getCode(), Boolean.toString(theMakeItCurrent));
assertTrue(
theVersion == null || theVersion.equals("2.67") || theVersion.equals("2.68") || theVersion.equals("2.69"),
"Versions supported are: 2.67, 2.68, 2.69 and null");
if (StringUtils.isBlank(theVersion)) {
uploadProperties.remove(LOINC_CODESYSTEM_VERSION.getCode());
} else {
uploadProperties.put(LOINC_CODESYSTEM_VERSION.getCode(), theVersion);
}
addLoincMandatoryFilesToZip(myFiles, theVersion);
UploadStatistics stats = myTermLoaderSvc.loadLoinc(myFiles.getFiles(), mySrd);
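// terminology is stored deferred; flush it so the validations that follow see fully persisted concepts and ValueSets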
myTerminologyDeferredStorageSvc.saveAllDeferred();
return stats.getTarget();
}
public void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles, String theVersion) throws IOException {
String theClassPathPrefix = getClassPathPrefix(theVersion);
addBaseLoincMandatoryFilesToZip(theFiles, true, theClassPathPrefix);
theFiles.addPropertiesZip(uploadProperties, LOINC_UPLOAD_PROPERTIES_FILE.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode());
}
private String getClassPathPrefix(String theVersion) {
String theClassPathPrefix = "/loinc-ver/v-no-version/";
if (StringUtils.isBlank(theVersion)) return theClassPathPrefix;
switch(theVersion) {
case "2.67": return "/loinc-ver/v267/";
case "2.68": return "/loinc-ver/v268/";
case "2.69": return "/loinc-ver/v269/";
}
fail("Setup failed. Unexpected version: " + theVersion);
return null;
}
private static void addBaseLoincMandatoryFilesToZip(
ZipCollectionBuilder theFiles, boolean theIncludeTop2000, String theClassPathPrefix) throws IOException {
theFiles.addFileZip(theClassPathPrefix, LOINC_XML_FILE.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_GROUP_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_GROUP_TERMS_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_PARENT_GROUP_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_DUPLICATE_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_HIERARCHY_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_DUPLICATE_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_LINK_DUPLICATE_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_PART_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode());
if (theIncludeTop2000) {
theFiles.addFileZip(theClassPathPrefix, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode());
theFiles.addFileZip(theClassPathPrefix, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode());
}
}
private TermCodeSystemVersion fetchCurrentCodeSystemVersion() {
return (TermCodeSystemVersion) myEntityManager.createQuery(
"select tcsv from TermCodeSystemVersion tcsv join fetch tcsv.myCodeSystem tcs " +
"where tcs.myCurrentVersion = tcsv" ).getSingleResult();
}
}

View File

@ -42,6 +42,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.empty;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertSame;
@ -406,7 +407,7 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
.addFilter() .addFilter()
.setProperty("copyright") .setProperty("copyright")
.setOp(ValueSet.FilterOperator.EQUAL) .setOp(ValueSet.FilterOperator.EQUAL)
.setValue("loinc"); .setValue(LOINC_LOW);
outcome = myTermSvc.expandValueSet(null, vs); outcome = myTermSvc.expandValueSet(null, vs);
codes = toCodesContains(outcome.getExpansion().getContains()); codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("47239-9")); assertThat(codes, containsInAnyOrder("47239-9"));
@ -478,7 +479,7 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
.addFilter() .addFilter()
.setProperty("copyright") .setProperty("copyright")
.setOp(ValueSet.FilterOperator.EQUAL) .setOp(ValueSet.FilterOperator.EQUAL)
.setValue("loinc"); .setValue(LOINC_LOW);
outcome = myTermSvc.expandValueSet(null, vs); outcome = myTermSvc.expandValueSet(null, vs);
codes = toCodesContains(outcome.getExpansion().getContains()); codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("50015-7", "43343-3", "43343-4")); assertThat(codes, containsInAnyOrder("50015-7", "43343-3", "43343-4"));

View File

@ -13,11 +13,15 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Properties;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream; import java.util.zip.ZipOutputStream;
public class ZipCollectionBuilder { public class ZipCollectionBuilder {
public static final String ZIP_ENTRY_PREFIX = "SnomedCT_Release_INT_20160131_Full/Terminology/";
private static final Logger ourLog = LoggerFactory.getLogger(ZipCollectionBuilder.class); private static final Logger ourLog = LoggerFactory.getLogger(ZipCollectionBuilder.class);
private final ArrayList<ITermLoaderSvc.FileDescriptor> myFiles; private final ArrayList<ITermLoaderSvc.FileDescriptor> myFiles;
@ -58,7 +62,7 @@ public class ZipCollectionBuilder {
bos = new ByteArrayOutputStream(); bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos); ZipOutputStream zos = new ZipOutputStream(bos);
ourLog.info("Adding {} to test zip", theClasspathFileName); ourLog.info("Adding {} to test zip", theClasspathFileName);
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theOutputFilename)); zos.putNextEntry(new ZipEntry(ZIP_ENTRY_PREFIX + theOutputFilename));
zos.write(readFile(theClasspathPrefix, theClasspathFileName)); zos.write(readFile(theClasspathPrefix, theClasspathFileName));
zos.closeEntry(); zos.closeEntry();
zos.close(); zos.close();
@ -76,6 +80,36 @@ public class ZipCollectionBuilder {
}); });
} }
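// serializes the given Properties into memory and adds them to the collection as a zip entry, mirroring addFileZip above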
public void addPropertiesZip(Properties properties, String theOutputFilename) throws IOException {
ByteArrayOutputStream bos;
bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
ourLog.info("Adding properties to test zip");
zos.putNextEntry(new ZipEntry(ZIP_ENTRY_PREFIX + theOutputFilename));
zos.write(getPropertiesBytes(properties));
zos.closeEntry();
zos.close();
ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
myFiles.add(new ITermLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return "AAA.zip";
}
@Override
public InputStream getInputStream() {
return new ByteArrayInputStream(bos.toByteArray());
}
});
}
private byte[] getPropertiesBytes(Properties theProperties) throws IOException {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
theProperties.store(byteArrayOutputStream, "");
return byteArrayOutputStream.toByteArray();
}
private byte[] readFile(String theClasspathPrefix, String theClasspathFileName) throws IOException { private byte[] readFile(String theClasspathPrefix, String theClasspathFileName) throws IOException {
String classpathName = theClasspathPrefix + theClasspathFileName; String classpathName = theClasspathPrefix + theClasspathFileName;
InputStream stream = getClass().getResourceAsStream(classpathName); InputStream stream = getClass().getResourceAsStream(classpathName);

View File

@ -0,0 +1,239 @@
package ca.uhn.fhir.jpa.term.api;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.TermReadSvcR4;
import ca.uhn.fhir.jpa.term.TermReadSvcUtil;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.CodeSystem;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Pageable;
import org.springframework.test.util.ReflectionTestUtils;
import javax.persistence.EntityManager;
import javax.persistence.NonUniqueResultException;
import java.util.Collections;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class ITermReadSvcTest {
private final ITermReadSvc testedClass = new TermReadSvcR4();
@Mock private ITermValueSetDao myTermValueSetDao;
@Mock private DaoRegistry myDaoRegistry;
@Mock private IFhirResourceDao<CodeSystem> myFhirResourceDao;
@Nested
public class FindCurrentTermValueSet {
@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myTermValueSetDao", myTermValueSetDao);
}
@Test
void forLoinc() {
String valueSetId = "a-loinc-value-set";
testedClass.findCurrentTermValueSet("http://loinc.org/vs/" + valueSetId);
verify(myTermValueSetDao, times(1)).findTermValueSetByForcedId(valueSetId);
verify(myTermValueSetDao, never()).findTermValueSetByUrl(isA(Pageable.class), anyString());
}
@Test
void forNotLoinc() {
String valueSetId = "not-a-loin-c-value-set";
testedClass.findCurrentTermValueSet("http://not-loin-c.org/vs/" + valueSetId);
verify(myTermValueSetDao, never()).findTermValueSetByForcedId(valueSetId);
verify(myTermValueSetDao, times(1)).findTermValueSetByUrl(isA(Pageable.class), anyString());
}
}
@Nested
public class MustReturnEmptyValueSet {
@Test
void doesntStartWithGenericVSReturnsTrue() {
boolean ret = TermReadSvcUtil.mustReturnEmptyValueSet("http://boing.org");
assertTrue(ret);
}
@Test
void doesntStartWithGenericVSPlusSlashThrows() {
InternalErrorException thrown = assertThrows(
InternalErrorException.class,
() -> TermReadSvcUtil.mustReturnEmptyValueSet("http://loinc.org/vs-no-slash-after-vs"));
assertTrue(thrown.getMessage().contains("Don't know how to extract ValueSet's ForcedId from url:"));
}
@Test
void blankVsIdReturnsTrue() {
boolean ret = TermReadSvcUtil.mustReturnEmptyValueSet("http://loinc.org/vs/");
assertTrue(ret);
}
@Test
void startsWithGenericPlusSlashPlusIdReturnsFalse() {
boolean ret = TermReadSvcUtil.mustReturnEmptyValueSet("http://loinc.org/vs/some-vs-id");
assertFalse(ret);
}
}
@Nested
public class IsLoincNotGenericUnversionedCodeSystem {
@Test
void doesntContainLoincReturnsFalse() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedCodeSystem("http://boing.org");
assertFalse(ret);
}
@Test
void hasVersionReturnsFalse() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedCodeSystem("http://boing.org|v2.68");
assertFalse(ret);
}
@Test
void containsLoincAndNoVersionReturnsTrue() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedCodeSystem("http://anything-plus-loinc.org");
assertTrue(ret);
}
}
@Nested
public class IsLoincNotGenericUnversionedValueSet {
@Test
void notLoincReturnsFalse() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://anything-but-loin-c.org");
assertFalse(ret);
}
@Test
void isLoincAndHasVersionReturnsFalse() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org|v2.67");
assertFalse(ret);
}
@Test
void isLoincNoVersionButEqualsGenericValueSetUrlReturnsFalse() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org/vs");
assertFalse(ret);
}
@Test
void isLoincNoVersionStartsWithGenericValueSetPlusSlashPlusIdReturnsTrue() {
boolean ret = TermReadSvcUtil.isLoincNotGenericUnversionedValueSet("http://loinc.org/vs/vs-id");
assertTrue(ret);
}
}
@Nested
public class ReadByForcedId {
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private EntityManager myEntityManager;
@Mock private ResourceTable resource1;
@Mock private ResourceTable resource2;
@Mock private IBaseResource myCodeSystemResource;
@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myEntityManager", myEntityManager);
}
@Test
void getNoneReturnsOptionalEmpty() {
when(myEntityManager.createQuery(anyString()).getResultList())
.thenReturn(Collections.emptyList());
Optional<IBaseResource> result = testedClass.readCodeSystemByForcedId("a-cs-id");
assertFalse(result.isPresent());
}
@Test
void getMultipleThrows() {
when(myEntityManager.createQuery(anyString()).getResultList())
.thenReturn(Lists.newArrayList(resource1, resource2));
NonUniqueResultException thrown = assertThrows(
NonUniqueResultException.class,
() -> testedClass.readCodeSystemByForcedId("a-cs-id"));
assertTrue(thrown.getMessage().contains("More than one CodeSystem is pointed by forcedId:"));
}
@Test
void getOneConvertToResource() {
ReflectionTestUtils.setField(testedClass, "myDaoRegistry", myDaoRegistry);
when(myEntityManager.createQuery(anyString()).getResultList())
.thenReturn(Lists.newArrayList(resource1));
when(myDaoRegistry.getResourceDao("CodeSystem")).thenReturn(myFhirResourceDao);
when(myFhirResourceDao.toResource(resource1, false)).thenReturn(myCodeSystemResource);
testedClass.readCodeSystemByForcedId("a-cs-id");
verify(myFhirResourceDao, times(1)).toResource(any(), eq(false));
}
}
}

View File

@ -0,0 +1,93 @@
#################
### MANDATORY ###
#################
# Answer lists (ValueSets of potential answers/values for LOINC "questions")
## File must be present
loinc.answerlist.file=AccessoryFiles/AnswerFile/AnswerList.csv
# Answer list links (connects LOINC observation codes to answer list codes)
## File must be present
loinc.answerlist.link.file=AccessoryFiles/AnswerFile/LoincAnswerListLink.csv
# Document ontology
## File must be present
loinc.document.ontology.file=AccessoryFiles/DocumentOntology/DocumentOntology.csv
# LOINC codes
## File must be present
loinc.file=LoincTable/Loinc.csv
# LOINC hierarchy
## File must be present
loinc.hierarchy.file=AccessoryFiles/MultiAxialHierarchy/MultiAxialHierarchy.csv
# IEEE medical device codes
## File must be present
loinc.ieee.medical.device.code.mapping.table.file=AccessoryFiles/LoincIeeeMedicalDeviceCodeMappingTable/LoincIeeeMedicalDeviceCodeMappingTable.csv
# Imaging document codes
## File must be present
loinc.imaging.document.codes.file=AccessoryFiles/ImagingDocuments/ImagingDocumentCodes.csv
# Part
## File must be present
loinc.part.file=AccessoryFiles/PartFile/Part.csv
# Part link
## File must be present
loinc.part.link.primary.file=AccessoryFiles/PartFile/LoincPartLink_Primary.csv
loinc.part.link.supplementary.file=AccessoryFiles/PartFile/LoincPartLink_Supplementary.csv
# Part related code mapping
## File must be present
loinc.part.related.code.mapping.file=AccessoryFiles/PartFile/PartRelatedCodeMapping.csv
# RSNA playbook
## File must be present
loinc.rsna.playbook.file=AccessoryFiles/LoincRsnaRadiologyPlaybook/LoincRsnaRadiologyPlaybook.csv
# Top 2000 codes - SI
## File must be present
loinc.top2000.common.lab.results.si.file=AccessoryFiles/Top2000Results/SI/Top2000CommonLabResultsSi.csv
# Top 2000 codes - US
## File must be present
loinc.top2000.common.lab.results.us.file=AccessoryFiles/Top2000Results/US/Top2000CommonLabResultsUs.csv
# Universal lab order ValueSet
## File must be present
loinc.universal.lab.order.valueset.file=AccessoryFiles/LoincUniversalLabOrdersValueSet/LoincUniversalLabOrdersValueSet.csv
################
### OPTIONAL ###
################
# This is the version identifier for the answer list file
## Key may be omitted
loinc.answerlist.version=Beta.1
# This is the version identifier for uploaded ConceptMap resources
## Key may be omitted
loinc.conceptmap.version=Beta.1
# Group
## Default value if key not provided: AccessoryFiles/GroupFile/Group.csv
## File may be omitted
loinc.group.file=AccessoryFiles/GroupFile/Group.csv
# Group terms
## Default value if key not provided: AccessoryFiles/GroupFile/GroupLoincTerms.csv
## File may be omitted
loinc.group.terms.file=AccessoryFiles/GroupFile/GroupLoincTerms.csv
# Parent group
## Default value if key not provided: AccessoryFiles/GroupFile/ParentGroup.csv
## File may be omitted
loinc.parent.group.file=AccessoryFiles/GroupFile/ParentGroup.csv
# Consumer Names
## Default value if key not provided: AccessoryFiles/ConsumerName/ConsumerName.csv
## File may be omitted
loinc.consumer.name.file=AccessoryFiles/ConsumerName/ConsumerName.csv
# Linguistic Variants
## Default value if key not provided: AccessoryFiles/LinguisticVariants/LinguisticVariants.csv
## File may be omitted
loinc.linguistic.variants.file=AccessoryFiles/LinguisticVariants/LinguisticVariants.csv

View File

@ -0,0 +1,82 @@
#################
### MANDATORY ###
#################
# Answer lists (ValueSets of potential answers/values for LOINC "questions")
## File must be present
loinc.answerlist.file=AccessoryFiles/AnswerFile/AnswerList.csv
# Answer list links (connects LOINC observation codes to answer list codes)
## File must be present
loinc.answerlist.link.file=AccessoryFiles/AnswerFile/LoincAnswerListLink.csv
# Document ontology
## File must be present
loinc.document.ontology.file=AccessoryFiles/DocumentOntology/DocumentOntology.csv
# LOINC codes
## File must be present
loinc.file=LoincTable/Loinc.csv
# LOINC hierarchy
## File must be present
loinc.hierarchy.file=AccessoryFiles/MultiAxialHierarchy/MultiAxialHierarchy.csv
# IEEE medical device codes
## File must be present
loinc.ieee.medical.device.code.mapping.table.file=AccessoryFiles/LoincIeeeMedicalDeviceCodeMappingTable/LoincIeeeMedicalDeviceCodeMappingTable.csv
# Imaging document codes
## File must be present
loinc.imaging.document.codes.file=AccessoryFiles/ImagingDocuments/ImagingDocumentCodes.csv
# Part
## File must be present
loinc.part.file=AccessoryFiles/PartFile/Part.csv
# Part link
## File must be present
loinc.part.link.file=AccessoryFiles/PartFile/LoincPartLink.csv
# Part related code mapping
## File must be present
loinc.part.related.code.mapping.file=AccessoryFiles/PartFile/PartRelatedCodeMapping.csv
# RSNA playbook
## File must be present
loinc.rsna.playbook.file=AccessoryFiles/LoincRsnaRadiologyPlaybook/LoincRsnaRadiologyPlaybook.csv
# Top 2000 codes - SI
## File must be present
loinc.top2000.common.lab.results.si.file=AccessoryFiles/Top2000Results/SI/Top2000CommonLabResultsSi.csv
# Top 2000 codes - US
## File must be present
loinc.top2000.common.lab.results.us.file=AccessoryFiles/Top2000Results/US/Top2000CommonLabResultsUs.csv
# Universal lab order ValueSet
## File must be present
loinc.universal.lab.order.valueset.file=AccessoryFiles/LoincUniversalLabOrdersValueSet/LoincUniversalLabOrdersValueSet.csv
################
### OPTIONAL ###
################
# This is the version identifier for the answer list file
## Key may be omitted
loinc.answerlist.version=Beta.1
# This is the version identifier for uploaded ConceptMap resources
## Key may be omitted
loinc.conceptmap.version=Beta.1
# Group
## Default value if key not provided: AccessoryFiles/GroupFile/Group.csv
## File may be omitted
loinc.group.file=AccessoryFiles/GroupFile/Group.csv
# Group terms
## Default value if key not provided: AccessoryFiles/GroupFile/GroupLoincTerms.csv
## File may be omitted
loinc.group.terms.file=AccessoryFiles/GroupFile/GroupLoincTerms.csv
# Parent group
## Default value if key not provided: AccessoryFiles/GroupFile/ParentGroup.csv
## File may be omitted
loinc.parent.group.file=AccessoryFiles/GroupFile/ParentGroup.csv

View File

@ -0,0 +1,12 @@
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink" ,"AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName" ,"ExtCodeSystem" ,"ExtCodeSystemVersion" ,"ExtCodeSystemCopyrightNotice" ,"SubsequentTextPrompt","Description","Score"
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA6270-8" ,"00" , ,1 ,"Never" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13836-4" ,"01" , ,2 ,"1-3 times per month" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13834-9" ,"02" , ,3 ,"1-2 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA4389-8" ,"97" , ,11 ,"Refused" ,"443390004","Refused (qualifier value)","http://snomed.info/sct","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College", , ,
"LL1892-0" ,"ICD-9_ICD-10" ,"1.3.6.1.4.1.12009.10.1.1069","Y" , ,"http://www.cdc.gov/nchs/icd.htm", , , , , , , , , , , , ,

View File

@ -0,0 +1,11 @@
"LoincNumber","LongCommonName" ,"AnswerListId","AnswerListName" ,"AnswerListLinkType","ApplicableContext"
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]","LL1000-0" ,"PhenX05_13_30D bread amt","NORMATIVE" ,
"10061-0" ,"S' wave amplitude in lead I" ,"LL1311-1" ,"PhenX12_44" ,"EXAMPLE" ,
"10331-7" ,"Rh [Type] in Blood" ,"LL360-9" ,"Pos|Neg" ,"EXAMPLE" ,
"10389-5" ,"Blood product.other [Type]" ,"LL2413-4" ,"Othr bld prod" ,"EXAMPLE" ,
"10390-3" ,"Blood product special preparation [Type]" ,"LL2422-5" ,"Blood prod treatment" ,"EXAMPLE" ,
"10393-7" ,"Factor IX given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10395-2" ,"Factor VIII given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10401-8" ,"Immune serum globulin given [Type]" ,"LL2421-7" ,"IM/IV" ,"EXAMPLE" ,
"10410-9" ,"Plasma given [Type]" ,"LL2417-5" ,"Plasma type" ,"EXAMPLE" ,
"10568-4" ,"Clarity of Semen" ,"LL2427-4" ,"Clear/Opales/Milky" ,"EXAMPLE" ,

View File

@ -0,0 +1,6 @@
"LoincNumber","ConsumerName"
"61438-8","Consumer Name 61438-8"
,"Consumer Name X"
47239-9",""
"17787-3","Consumer Name 17787-3"
"38699-5","1,1-Dichloroethane, Air"

View File

@ -0,0 +1,10 @@
"LoincNumber","PartNumber","PartTypeName","PartSequenceOrder","PartName"
"11488-4","LP173418-7","Document.Kind","1","Note"
"11488-4","LP173110-0","Document.TypeOfService","1","Consultation"
"11488-4","LP173061-5","Document.Setting","1","{Setting}"
"11488-4","LP187187-2","Document.Role","1","{Role}"
"11490-0","LP173418-7","Document.Kind","1","Note"
"11490-0","LP173221-5","Document.TypeOfService","1","Discharge summary"
"11490-0","LP173061-5","Document.Setting","1","{Setting}"
"11490-0","LP173084-7","Document.Role","1","Physician"
"11492-6","LP173418-7","Document.Kind","1","Note"

View File

@ -0,0 +1,2 @@
"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased"
"LG100-4","LG1695-8","1,4-Dichlorobenzene|MCnc|Pt|ANYBldSerPl","","Active",""

View File

@ -0,0 +1,3 @@
"Category","GroupId","Archetype","LoincNumber","LongCommonName"
"Flowsheet","LG1695-8","","17424-3","1,4-Dichlorobenzene [Mass/volume] in Blood"
"Flowsheet","LG1695-8","","13006-2","1,4-Dichlorobenzene [Mass/volume] in Serum or Plasma"

View File

@ -0,0 +1,2 @@
"ParentGroupId","ParentGroup","Status"
"LG100-4","Chem_DrugTox_Chal_Sero_Allergy<SAME:Comp|Prop|Tm|Syst (except intravascular and urine)><ANYBldSerPlas,ANYUrineUrineSed><ROLLUP:Method>","ACTIVE"

View File

@ -0,0 +1,10 @@
"LOINC_NUM","LONG_COMMON_NAME"
"11525-3","US Pelvis Fetus for pregnancy"
"17787-3","NM Thyroid gland Study report"
"18744-3","Bronchoscopy study"
"18746-8","Colonoscopy study"
"18748-4","Diagnostic imaging study"
"18751-8","Endoscopy study"
"18753-4","Flexible sigmoidoscopy study"
"24531-6","US Retroperitoneum"
"24532-4","US Abdomen RUQ"

View File

@ -0,0 +1,9 @@
"ID","ISO_LANGUAGE","ISO_COUNTRY","LANGUAGE_NAME","PRODUCER"
"5","zh","CN","Chinese (CHINA)","Lin Zhang, A LOINC volunteer from China"
"7","es","AR","Spanish (ARGENTINA)","Conceptum Medical Terminology Center"
"8","fr","CA","French (CANADA)","Canada Health Infoway Inc."
,"de","AT","German (AUSTRIA)","ELGA, Austria"
"88",,"AT","German (AUSTRIA)","ELGA, Austria"
"89","de",,"German (AUSTRIA)","ELGA, Austria"
"90","de","AT",,"ELGA, Austria"
"24","de","AT","German (AUSTRIA)","ELGA, Austria"

View File

@ -0,0 +1,4 @@
"LOINC_NUM","COMPONENT","PROPERTY","TIME_ASPCT","SYSTEM","SCALE_TYP","METHOD_TYP","CLASS","SHORTNAME","LONG_COMMON_NAME","RELATEDNAMES2","LinguisticVariantDisplayName"
"61438-8","Entlassungsbrief Ärztlich","Ergebnis","Zeitpunkt","{Setting}","Dokument","Dermatologie","DOC.ONTOLOGY","de shortname","de long common name","de related names 2","de linguistic variant display name"
"43730-1","","","","","","","","","","EBV-DNA qn. PCR","EBV-DNA quantitativ PCR"
"17787-3","","","","","","","","","","CoV OC43 RNA ql/SM P","Coronavirus OC43 RNA ql. /Sondermaterial PCR"

View File

@ -0,0 +1,6 @@
"LOINC_NUM","COMPONENT","PROPERTY","TIME_ASPCT","SYSTEM","SCALE_TYP","METHOD_TYP","CLASS","SHORTNAME","LONG_COMMON_NAME","RELATEDNAMES2","LinguisticVariantDisplayName"
"61438-8","Cellules de Purkinje cytoplasmique type 2 , IgG","Titre","Temps ponctuel","Sérum","Quantitatif","Immunofluorescence","Sérologie","","","",""
"11704-4","Gliale nucléaire de type 1 , IgG","Titre","Temps ponctuel","LCR","Quantitatif","Immunofluorescence","Sérologie","","","",""
,"Cellules de Purkinje cytoplasmique type 2 , IgG","Titre","Temps ponctuel","Sérum","Quantitatif",,,"","","",""
"17787-3","Virus respiratoire syncytial bovin","Présence-Seuil","Temps ponctuel","XXX","Ordinal","Culture spécifique à un microorganisme","Microbiologie","","","",""
"17788-1","Cellules de Purkinje cytoplasmique type 2 , IgG","Titre","Temps ponctuel","Sérum","Quantitatif",,,"","","",""

View File

@ -0,0 +1,9 @@
"LOINC_NUM","COMPONENT","PROPERTY","TIME_ASPCT","SYSTEM","SCALE_TYP","METHOD_TYP","CLASS","SHORTNAME","LONG_COMMON_NAME","RELATEDNAMES2","LinguisticVariantDisplayName"
"61438-8","血流速度.收缩期.最大值","速度","时间点","大脑中动脉","定量型","超声.多普勒","产科学检查与测量指标.超声","","","Cereb 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 大脑Cerebral 时刻;随机;随意;瞬间 术语""cerebral""指的是主要由中枢半球(大脑皮质和基底神经节)组成的那部分脑结构 流 流量;流速;流体 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"11704-4","血流速度.收缩期.最大值","速度","时间点","动脉导管","定量型","超声.多普勒","产科学检查与测量指标.超声","","","动态 动脉管 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 时刻;随机;随意;瞬间 流 流量;流速;流体 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"17787-3","血流速度.收缩期.最大值","速度","时间点","二尖瓣^胎儿","定量型","超声.多普勒","产科学检查与测量指标.超声","","","僧帽瓣 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 时刻;随机;随意;瞬间 流 流量;流速;流体 胎;超系统 - 胎儿 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"61438-6",,"速度","时间点","二尖瓣^胎儿","定量型","超声.多普勒","产科学检查与测量指标.超声","","","僧帽瓣 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 时刻;随机;随意;瞬间 流 流量;流速;流体 胎;超系统 - 胎儿 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"10000-8","血流速度.收缩期.最大值",,"时间点","二尖瓣^胎儿","定量型","超声.多普勒","产科学检查与测量指标.超声","","","僧帽瓣 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 时刻;随机;随意;瞬间 流 流量;流速;流体 胎;超系统 - 胎儿 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"17788-1","血流速度.收缩期.最大值","速度",,"大脑中动脉","定量型","超声.多普勒","产科学检查与测量指标.超声","","","Cereb 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 大脑Cerebral 时刻;随机;随意;瞬间 术语""cerebral""指的是主要由中枢半球(大脑皮质和基底神经节)组成的那部分脑结构 流 流量;流速;流体 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"11488-4","血流速度.收缩期.最大值","速度","时间点",,"定量型","超声.多普勒","产科学检查与测量指标.超声","","","Cereb 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 大脑Cerebral 时刻;随机;随意;瞬间 术语""cerebral""指的是主要由中枢半球(大脑皮质和基底神经节)组成的那部分脑结构 流 流量;流速;流体 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""
"47239-9","血流速度.收缩期.最大值","速度","时间点","大脑中动脉",,"超声.多普勒","产科学检查与测量指标.超声","","","Cereb 动态 可用数量表示的;定量性;数值型;数量型;连续数值型标尺 大脑Cerebral 时刻;随机;随意;瞬间 术语""cerebral""指的是主要由中枢半球(大脑皮质和基底神经节)组成的那部分脑结构 流 流量;流速;流体 血;全血 血流量;血液流量 速度(距离/时间);速率;速率(距离/时间)",""

View File

@ -0,0 +1,10 @@
LOINC_NUM,LOINC_LONG_COMMON_NAME,IEEE_CF_CODE10,IEEE_REFID,IEEE_DESCRIPTION,IEEE_DIM,IEEE_UOM_UCUM
11556-8,Oxygen [Partial pressure] in Blood,160116,MDC_CONC_PO2_GEN,,LMT-2L-2 LMT-2L-2,kPa mm[Hg]
11557-6,Carbon dioxide [Partial pressure] in Blood,160064,MDC_CONC_PCO2_GEN,,LMT-2L-2 LMT-2L-2,kPa mm[Hg]
11558-4,pH of Blood,160004,MDC_CONC_PH_GEN,,[pH],[pH]
12961-9,Urea nitrogen [Mass/volume] in Arterial blood,160080,MDC_CONC_UREA_ART,,ML-3 NL-3,mg/dL mmol/L
14749-6,Glucose [Moles/volume] in Serum or Plasma,160196,MDC_CONC_GLU_VENOUS_PLASMA,Plasma glucose concentration taken from venous,NL-3 ,mmol/L
14749-6,Glucose [Moles/volume] in Serum or Plasma,160368,MDC_CONC_GLU_UNDETERMINED_PLASMA,Plasma glucose concentration taken from undetermined sample source,NL-3 ,mmol/L
15074-8,Glucose [Moles/volume] in Blood,160020,MDC_CONC_GLU_GEN,,NL-3,mmol/L
15074-8,Glucose [Moles/volume] in Blood,160364,MDC_CONC_GLU_UNDETERMINED_WHOLEBLOOD,Whole blood glucose concentration taken from undetermined sample source,NL-3 ,mmol/L
17861-6,Calcium [Mass/volume] in Serum or Plasma,160024,MDC_CONC_CA_GEN,,ML-3,mg/dL

View File

@ -0,0 +1,10 @@
"LoincNumber","LongCommonName" ,"PartNumber","PartTypeName" ,"PartName" ,"PartSequenceOrder","RID" ,"PreferredName" ,"RPID" ,"LongName"
"17787-3" ,"NM Thyroid gland Study report","LP199995-4","Rad.Anatomic Location.Region Imaged","Neck" ,"A" ,"RID7488" ,"neck" ,"" ,""
"17787-3" ,"NM Thyroid gland Study report","LP206648-0","Rad.Anatomic Location.Imaging Focus","Thyroid gland" ,"A" ,"RID7578" ,"thyroid gland" ,"" ,""
"17787-3" ,"NM Thyroid gland Study report","LP208891-4","Rad.Modality.Modality type" ,"NM" ,"A" ,"RID10330","nuclear medicine imaging","" ,""
"24531-6" ,"US Retroperitoneum" ,"LP207608-3","Rad.Modality.Modality type" ,"US" ,"A" ,"RID10326","Ultrasound" ,"RPID2142","US Retroperitoneum"
"24531-6" ,"US Retroperitoneum" ,"LP199943-4","Rad.Anatomic Location.Imaging Focus","Retroperitoneum" ,"A" ,"RID431" ,"RETROPERITONEUM" ,"RPID2142","US Retroperitoneum"
"24531-6" ,"US Retroperitoneum" ,"LP199956-6","Rad.Anatomic Location.Region Imaged","Abdomen" ,"A" ,"RID56" ,"Abdomen" ,"RPID2142","US Retroperitoneum"
"24532-4" ,"US Abdomen RUQ" ,"LP199956-6","Rad.Anatomic Location.Region Imaged","Abdomen" ,"A" ,"RID56" ,"Abdomen" ,"" ,""
"24532-4" ,"US Abdomen RUQ" ,"LP207608-3","Rad.Modality.Modality type" ,"US" ,"A" ,"RID10326","Ultrasound" ,"" ,""
"24532-4" ,"US Abdomen RUQ" ,"LP208105-9","Rad.Anatomic Location.Imaging Focus","Right upper quadrant","A" ,"RID29994","Right upper quadrant" ,"" ,""

View File

@ -0,0 +1,10 @@
"LOINC_NUM","LONG_COMMON_NAME","ORDER_OBS"
"42176-8","1,3 beta glucan [Mass/volume] in Serum","Both"
"53835-5","1,5-Anhydroglucitol [Mass/volume] in Serum or Plasma","Both"
"31019-3","10-Hydroxycarbazepine [Mass/volume] in Serum or Plasma","Both"
"6765-2","17-Hydroxypregnenolone [Mass/volume] in Serum or Plasma","Both"
"1668-3","17-Hydroxyprogesterone [Mass/volume] in Serum or Plasma","Both"
"32854-2","17-Hydroxyprogesterone [Presence] in DBS","Both"
"49054-0","25-Hydroxycalciferol [Mass/volume] in Serum or Plasma","Both"
"62292-8","25-Hydroxyvitamin D2+25-Hydroxyvitamin D3 [Mass/volume] in Serum or Plasma","Both"
"44907-4","5-Hydroxyindoleacetate panel - 24 hour Urine","Order"

View File

@ -0,0 +1,10 @@
PATH_TO_ROOT,SEQUENCE,IMMEDIATE_PARENT,CODE,CODE_TEXT
,1,,LP31755-9,Microbiology
LP31755-9,1,LP31755-9,LP14559-6,Microorganism
LP31755-9.LP14559-6,1,LP14559-6,LP98185-9,Bacteria
LP31755-9.LP14559-6.LP98185-9,1,LP98185-9,LP14082-9,Bacteria
LP31755-9.LP14559-6.LP98185-9.LP14082-9,1,LP14082-9,LP52258-8,Bacteria | Body Fluid
LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52258-8,1,LP52258-8,41599-2,Bacteria Fld Ql Micro
LP31755-9.LP14559-6.LP98185-9.LP14082-9,2,LP14082-9,LP52260-4,Bacteria | Cerebral spinal fluid
LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52260-4,1,LP52260-4,41602-4,Bacteria CSF Ql Micro
LP31755-9.LP14559-6.LP98185-9.LP14082-9,3,LP14082-9,LP52960-9,Bacteria | Cervix

View File

@ -0,0 +1,12 @@
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink" ,"AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName" ,"ExtCodeSystem" ,"ExtCodeSystemVersion" ,"ExtCodeSystemCopyrightNotice" ,"SubsequentTextPrompt","Description","Score"
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA6270-8" ,"00" , ,1 ,"Never" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13836-4" ,"01" , ,2 ,"1-3 times per month" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13834-9" ,"02" , ,3 ,"1-2 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA4389-8" ,"97" , ,11 ,"Refused" ,"443390004","Refused (qualifier value)","http://snomed.info/sct","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College", , ,
"LL1892-0" ,"ICD-9_ICD-10" ,"1.3.6.1.4.1.12009.10.1.1069","Y" , ,"http://www.cdc.gov/nchs/icd.htm", , , , , , , , , , , , ,

View File

@ -0,0 +1,16 @@
"LOINC_NUM","COMPONENT" ,"PROPERTY","TIME_ASPCT","SYSTEM" ,"SCALE_TYP","METHOD_TYP" ,"CLASS" ,"VersionLastChanged","CHNG_TYPE","DefinitionDescription" ,"STATUS","CONSUMER_NAME","CLASSTYPE","FORMULA","SPECIES","EXMPL_ANSWERS","SURVEY_QUEST_TEXT" ,"SURVEY_QUEST_SRC" ,"UNITSREQUIRED","SUBMITTED_UNITS","RELATEDNAMES2" ,"SHORTNAME" ,"ORDER_OBS" ,"CDISC_COMMON_TESTS","HL7_FIELD_SUBFIELD_ID","EXTERNAL_COPYRIGHT_NOTICE" ,"EXAMPLE_UNITS","LONG_COMMON_NAME" ,"UnitsAndRange","DOCUMENT_SECTION","EXAMPLE_UCUM_UNITS","EXAMPLE_SI_UCUM_UNITS","STATUS_REASON","STATUS_TEXT","CHANGE_REASON_PUBLIC" ,"COMMON_TEST_RANK","COMMON_ORDER_RANK","COMMON_SI_TEST_RANK","HL7_ATTACHMENT_STRUCTURE","EXTERNAL_COPYRIGHT_LINK","PanelType","AskAtOrderEntry","AssociatedObservations" ,"VersionFirstReleased","ValidHL7AttachmentRequest"
"10013-1" ,"R' wave amplitude.lead I" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-I; R wave Amp L-I; Random; Right; Voltage" ,"R' wave Amp L-I" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead I" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10014-9" ,"R' wave amplitude.lead II" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"2; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-II; R wave Amp L-II; Random; Right; Voltage" ,"R' wave Amp L-II" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead II" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10015-6" ,"R' wave amplitude.lead III" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"3; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-III; R wave Amp L-III; Random; Right; Voltage" ,"R' wave Amp L-III" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead III" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10016-4" ,"R' wave amplitude.lead V1" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V1; R wave Amp L-V1; Random; Right; Voltage" ,"R' wave Amp L-V1" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V1" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"1001-7" ,"DBG Ab" ,"Pr" ,"Pt" ,"Ser/Plas^donor" ,"Ord" , ,"BLDBK" ,"2.44" ,"MIN" , ,"ACTIVE", ,1 , , , , , , , ,"ABS; Aby; Antby; Anti; Antibodies; Antibody; Autoantibodies; Autoantibody; BLOOD BANK; Donna Bennett-Goodspeed; Donr; Ordinal; Pl; Plasma; Plsm; Point in time; QL; Qual; Qualitative; Random; Screen; SerP; SerPl; SerPl^donor; SerPlas; Serum; Serum or plasma; SR" ,"DBG Ab SerPl Donr Ql" ,"Observation", , , , ,"DBG Ab [Presence] in Serum or Plasma from donor" , , , , , , ,"The Property has been changed from ACnc to Pr (Presence) to reflect the new model for ordinal terms where results are based on presence or absence." ,0 ,0 ,0 , , , , , , ,
"10017-2" ,"R' wave amplitude.lead V2" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V2; R wave Amp L-V2; Random; Right; Voltage" ,"R' wave Amp L-V2" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V2" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10018-0" ,"R' wave amplitude.lead V3" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V3; R wave Amp L-V3; Random; Right; Voltage" ,"R' wave Amp L-V3" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V3" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10019-8" ,"R' wave amplitude.lead V4" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage" ,"R' wave Amp L-V4" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V4" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10020-6" ,"R' wave amplitude.lead V5" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage" ,"R' wave Amp L-V5" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V5" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30D","Find" ,"Pt" ,"^Patient" ,"Ord" ,"PhenX" ,"PHENX" ,"2.44" ,"MIN" , ,"TRIAL" , ,2 , , , ,"Each time you eat bread, toast or dinner rolls, how much do you usually eat?","PhenX.050201100100","N" , ,"Finding; Findings; How much bread in 30D; Last; Ordinal; Point in time; QL; Qual; Qualitative; Random; Screen" ,"How much bread in 30D PhenX", , , , , ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]", , , , , , , ,0 ,0 ,0 , , , , , , ,
"10000-8" ,"R wave duration.lead AVR" ,"Time" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; Durat; ECG; EKG.MEASUREMENTS; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave dur L-AVR; R wave dur L-AVR; Random; Right" ,"R wave dur L-AVR" ,"Observation", , , ,"s" ,"R wave duration in lead AVR" , , ,"s" , , , , ,0 ,0 ,0 , , , , , , ,
"17787-3" ,"Study report" ,"Find" ,"Pt" ,"Neck>Thyroid gland","Doc" ,"NM" ,"RAD" ,"2.61" ,"MIN" , ,"ACTIVE", ,2 , , , , , , , ,"Document; Finding; Findings; Imaging; Point in time; Radiology; Random; Study report; Thy" ,"NM Thyroid Study report" ,"Both" , , , , ,"NM Thyroid gland Study report" , , , , , , ,"Changed System from ""Thyroid"" for conformance with the LOINC/RadLex unified model.; Method of ""Radnuc"" was changed to ""NM"". The LOINC/RadLex Committee agreed to use a subset of the two-letter DICOM modality codes as the primary modality identifier." ,0 ,0 ,0 ,"IG exists" , , , ,"81220-6;72230-6" ,"1.0l" ,
"17788-1" ,"Large unstained cells/100 leukocytes" ,"NFr" ,"Pt" ,"Bld" ,"Qn" ,"Automated count","HEM/BC" ,"2.50" ,"MIN" ,"Part of auto diff output of Bayer H*3S; peroxidase negative cells too large to be classified as lymph or basophil" ,"ACTIVE", ,1 , , , , , ,"Y" ,"%" ,"100WBC; Auto; Automated detection; Blood; Cell; Cellularity; Elec; Elect; Electr; HEMATOLOGY/CELL COUNTS; Leuc; Leuk; Leukocyte; Lkcs; LUC; Number Fraction; Percent; Point in time; QNT; Quan; Quant; Quantitative; Random; WB; WBC; WBCs; White blood cell; White blood cells; Whole blood" ,"LUC/leuk NFr Bld Auto" ,"Observation", , , ,"%" ,"Large unstained cells/100 leukocytes in Blood by Automated count" , , ,"%" , , , , ,1894 ,0 ,1894 , , , , , ,"1.0l" ,
"11488-4" ,"Consultation note" ,"Find" ,"Pt" ,"{Setting}" ,"Doc" ,"{Role}" ,"DOC.ONTOLOGY","2.63" ,"MIN" , ,"ACTIVE", ,2 , , , , , , , ,"Consult note; DOC.ONT; Document; Encounter; Evaluation and management; Evaluation and management note; Finding; Findings; notes; Point in time; Random; Visit note" ,"Consult note" ,"Both" , , , , ,"Consult note" , , , , , , ,"Edit made because this term is conformant to the Document Ontology axis values and therefore are being placed in this class.; Based on Clinical LOINC Committee decision during the September 2014 meeting, {Provider} was changed to {Author Type} to emphasize a greater breadth of potential document authors. At the September 2015 Clinical LOINC Committee meeting, the Committee decided to change {Author Type} to {Role} to align with the 'Role' axis name in the LOINC Document Ontology.; Because it is too difficult to maintain and because the distinction between documents and sections is not clear-cut nor necessary in most cases, the DOCUMENT_SECTION field has been deemed to have little value. The field has been set to null in the December 2017 release in preparation for removal in the December 2018 release. These changes were approved by the Clinical LOINC Committee.",0 ,0 ,0 ,"IG exists" , , , ,"81222-2;72231-4;81243-8","1.0j-a" ,"Y"
"47239-9" ,"Reason for stopping HIV Rx" ,"Type" ,"Pt" ,"^Patient" ,"Nom" ,"Reported" ,"ART" ,"2.50" ,"MIN" ,"Reason for stopping antiretroviral therapy" ,"ACTIVE", ,2 , , , , , ,"N" , ,"AIDS; Anti-retroviral therapy; ART; Human immunodeficiency virus; Nominal; Point in time; Random; Treatment; Typ" ,"Reason for stopping HIV Rx" ,"Observation", , ,"Copyright © 2006 World Health Organization. Used with permission. Publications of the World Health Organization can be obtained from WHO Press, World Health Organization, 20 Avenue Appia, 1211 Geneva 27, Switzerland (tel: +41 22 791 2476; fax: +41 22 791 4857; email: bookorders@who.int). Requests for permission to reproduce or translate WHO publications whether for sale or for noncommercial distribution should be addressed to WHO Press, at the above address (fax: +41 22 791 4806; email: permissions@who.int). The designations employed and the presentation of the material in this publication do not imply the expression of any opinion whatsoever on the part of the World Health Organization concerning the legal status of any country, territory, city or area or of its authorities, or concerning the delimitation of its frontiers or boundaries. Dotted lines on maps represent approximate border lines for which there may not yet be full agreement. The mention of specific companies or of certain manufacturers products does not imply that they are endorsed or recommended by the World Health Organization in preference to others of a similar nature that are not mentioned. Errors and omissions excepted, the names of proprietary products are distinguished by initial capital letters. All reasonable precautions have been taken by WHO to verify the information contained in this publication. However, the published material is being distributed without warranty of any kind, either express or implied. The responsibility for the interpretation and use of the material lies with the reader. In no event shall the World Health Organization be liable for damages arising from its use.", ,"Reason for stopping HIV treatment" , , , , , , , ,0 ,0 ,0 , ,"WHO_HIV" , , , ,"2.22" ,

View File

@ -0,0 +1,11 @@
"LoincNumber","LongCommonName" ,"AnswerListId","AnswerListName" ,"AnswerListLinkType","ApplicableContext"
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]","LL1000-0" ,"PhenX05_13_30D bread amt","NORMATIVE" ,
"10061-0" ,"S' wave amplitude in lead I" ,"LL1311-1" ,"PhenX12_44" ,"EXAMPLE" ,
"10331-7" ,"Rh [Type] in Blood" ,"LL360-9" ,"Pos|Neg" ,"EXAMPLE" ,
"10389-5" ,"Blood product.other [Type]" ,"LL2413-4" ,"Othr bld prod" ,"EXAMPLE" ,
"10390-3" ,"Blood product special preparation [Type]" ,"LL2422-5" ,"Blood prod treatment" ,"EXAMPLE" ,
"10393-7" ,"Factor IX given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10395-2" ,"Factor VIII given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10401-8" ,"Immune serum globulin given [Type]" ,"LL2421-7" ,"IM/IV" ,"EXAMPLE" ,
"10410-9" ,"Plasma given [Type]" ,"LL2417-5" ,"Plasma type" ,"EXAMPLE" ,
"10568-4" ,"Clarity of Semen" ,"LL2427-4" ,"Clear/Opales/Milky" ,"EXAMPLE" ,

View File

@ -0,0 +1,10 @@
"LoincNumber","LongCommonName","PartNumber","PartName","PartCodeSystem","PartTypeName","LinkTypeName","Property"
"10013-1","R' wave amplitude in lead I","LP31101-6","R' wave amplitude.lead I","http://loinc.org","COMPONENT","Primary","http://loinc.org/property/COMPONENT"
"10013-1","R' wave amplitude in lead I","LP6802-5","Elpot","http://loinc.org","PROPERTY","Primary","http://loinc.org/property/PROPERTY"
"10013-1","R' wave amplitude in lead I","LP6960-1","Pt","http://loinc.org","TIME","Primary","http://loinc.org/property/TIME_ASPCT"
"10013-1","R' wave amplitude in lead I","LP7289-4","Heart","http://loinc.org","SYSTEM","Primary","http://loinc.org/property/SYSTEM"
"10013-1","R' wave amplitude in lead I","LP7753-9","Qn","http://loinc.org","SCALE","Primary","http://loinc.org/property/SCALE_TYP"
"10013-1","R' wave amplitude in lead I","LP6244-0","EKG","http://loinc.org","METHOD","Primary","http://loinc.org/property/METHOD_TYP"
"10013-1","R' wave amplitude in lead I","LP31101-6","R' wave amplitude.lead I","http://loinc.org","COMPONENT","DetailedModel","http://loinc.org/property/analyte"
"10013-1","R' wave amplitude in lead I","LP6802-5","Elpot","http://loinc.org","PROPERTY","DetailedModel","http://loinc.org/property/PROPERTY"
"10013-1","R' wave amplitude in lead I","LP6960-1","Pt","http://loinc.org","TIME","DetailedModel","http://loinc.org/property/time-core"

View File

@ -0,0 +1,7 @@
"LoincNumber","LongCommonName" ,"PartNumber","PartName" ,"PartCodeSystem" ,"PartTypeName","LinkTypeName","Property"
"10013-1" ,"R' wave amplitude in lead I","LP31101-6" ,"R' wave amplitude.lead I","http://loinc.org","COMPONENT" ,"Primary" ,"http://loinc.org/property/COMPONENT"
"10013-1" ,"R' wave amplitude in lead I","LP6802-5" ,"Elpot" ,"http://loinc.org","PROPERTY" ,"Primary" ,"http://loinc.org/property/PROPERTY"
"10013-1" ,"R' wave amplitude in lead I","LP6960-1" ,"Pt" ,"http://loinc.org","TIME" ,"Primary" ,"http://loinc.org/property/TIME_ASPCT"
"10013-1" ,"R' wave amplitude in lead I","LP7289-4" ,"Heart" ,"http://loinc.org","SYSTEM" ,"Primary" ,"http://loinc.org/property/SYSTEM"
"10013-1" ,"R' wave amplitude in lead I","LP7753-9" ,"Qn" ,"http://loinc.org","SCALE" ,"Primary" ,"http://loinc.org/property/SCALE_TYP"
"10013-1" ,"R' wave amplitude in lead I","LP6244-0" ,"EKG" ,"http://loinc.org","METHOD" ,"Primary" ,"http://loinc.org/property/METHOD_TYP"

View File

@ -0,0 +1,13 @@
"LoincNumber","LongCommonName" ,"PartNumber","PartName" ,"PartCodeSystem" ,"PartTypeName","LinkTypeName" ,"Property"
"10013-1" ,"R' wave amplitude in lead I","LP31101-6" ,"R' wave amplitude.lead I","http://loinc.org","COMPONENT" ,"DetailedModel" ,"http://loinc.org/property/analyte"
"10013-1" ,"R' wave amplitude in lead I","LP6802-5" ,"Elpot" ,"http://loinc.org","PROPERTY" ,"DetailedModel" ,"http://loinc.org/property/PROPERTY"
"10013-1" ,"R' wave amplitude in lead I","LP6960-1" ,"Pt" ,"http://loinc.org","TIME" ,"DetailedModel" ,"http://loinc.org/property/time-core"
"10013-1" ,"R' wave amplitude in lead I","LP7289-4" ,"Heart" ,"http://loinc.org","SYSTEM" ,"DetailedModel" ,"http://loinc.org/property/system-core"
"10013-1" ,"R' wave amplitude in lead I","LP7753-9" ,"Qn" ,"http://loinc.org","SCALE" ,"DetailedModel" ,"http://loinc.org/property/SCALE_TYP"
"10013-1" ,"R' wave amplitude in lead I","LP6244-0" ,"EKG" ,"http://loinc.org","METHOD" ,"DetailedModel" ,"http://loinc.org/property/METHOD_TYP"
"10013-1" ,"R' wave amplitude in lead I","LP31101-6" ,"R' wave amplitude.lead I","http://loinc.org","COMPONENT" ,"SyntaxEnhancement","http://loinc.org/property/analyte-core"
"10013-1" ,"R' wave amplitude in lead I","LP190563-9","Cardiology" ,"http://loinc.org","CLASS" ,"Metadata" ,"http://loinc.org/property/category"
"10013-1" ,"R' wave amplitude in lead I","LP29708-2" ,"Cardiology" ,"http://loinc.org","CLASS" ,"Metadata" ,"http://loinc.org/property/category"
"10013-1" ,"R' wave amplitude in lead I","LP7787-7" ,"Clinical" ,"http://loinc.org","CLASS" ,"Metadata" ,"http://loinc.org/property/category"
"10013-1" ,"R' wave amplitude in lead I","LP7795-0" ,"EKG measurements" ,"http://loinc.org","CLASS" ,"Metadata" ,"http://loinc.org/property/category"
"10013-1" ,"R' wave amplitude in lead I","LP7795-0" ,"EKG.MEAS" ,"http://loinc.org","CLASS" ,"Metadata" ,"http://loinc.org/property/CLASS"

Some files were not shown because too many files have changed in this diff.