Merge remote-tracking branch 'remotes/origin/master' into ja_20190822_1440_infinispan_query_cache

# Conflicts:
#	hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
#	src/changes/changes.xml

commit 259426b0dd
@@ -76,7 +76,7 @@ public class ValidateUtil {
 	}
 
 	public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage, Object... theValues) {
-		if (theSuccess == false) {
+		if (!theSuccess) {
 			throw new InvalidRequestException(String.format(theMessage, theValues));
 		}
 	}
@@ -17,8 +17,8 @@ ca.uhn.fhir.rest.client.impl.GenericClient.noPagingLinkFoundInBundle=Can not per
 ca.uhn.fhir.rest.client.impl.GenericClient.noVersionIdForVread=No version specified in URL for 'vread' operation: {0}
+ca.uhn.fhir.rest.client.impl.GenericClient.incompleteUriForRead=The given URI is not an absolute URL and is not usable for this operation: {0}
+ca.uhn.fhir.rest.client.impl.GenericClient.cannotDetermineResourceTypeFromUri=Unable to determine the resource type from the given URI: {0}
 ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. URL used was {0}
 ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext).
 
 ca.uhn.fhir.rest.server.interceptor.auth.RuleImplOp.invalidRequestBundleTypeForTransaction=Invalid request Bundle.type value for transaction: {0}
 
@@ -46,14 +46,14 @@ ca.uhn.fhir.rest.param.BaseParam.chainNotSupported=Invalid search parameter "{0}
 
 ca.uhn.fhir.rest.server.method.ResourceParameter.invalidContentTypeInRequest=Incorrect Content-Type header value of "{0}" was provided in the request. A FHIR Content-Type is required for "{1}" operation
 ca.uhn.fhir.rest.server.method.ResourceParameter.noContentTypeInRequest=No Content-Type header was provided in the request. This is required for "{0}" operation
 ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. Error was: {1}
 
 ca.uhn.fhir.parser.ParserState.wrongResourceTypeFound=Incorrect resource type found, expected "{0}" but found "{1}"
 ca.uhn.fhir.rest.server.RestfulServer.getPagesNonHttpGet=Requests for _getpages must use HTTP GET
 ca.uhn.fhir.rest.server.RestfulServer.unknownMethod=Invalid request: The FHIR endpoint on this server does not know how to handle {0} operation[{1}] with parameters [{2}]
 ca.uhn.fhir.rest.server.RestfulServer.rootRequest=This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name.
 ca.uhn.fhir.rest.server.RestfulServer.rootRequest.multitenant=This is the base URL of a multitenant FHIR server. Unable to handle this request, as it does not contain a tenant ID.
 ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body?
 ca.uhn.fhir.validation.FhirValidator.noPhWarningOnStartup=Ph-schematron library not found on classpath, will not attempt to perform schematron validation
 ca.uhn.fhir.validation.FhirValidator.noPhError=Ph-schematron library not found on classpath, can not enable perform schematron validation
 ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected during validation
 
@@ -62,22 +62,22 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri
 
 
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.
 
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate index matching query: {1} (existing index belongs to {2})
 
 ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
 ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
 ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}
 
 ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
 ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
 
@@ -97,8 +97,8 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully delet
 ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}". Value search parameters for this search are: {1}
 
 ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
 ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation
 ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request
 
 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references
 ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1}
 
@@ -118,8 +118,8 @@ ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascadi
 
 ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
 ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.noAttachmentDataPresent=The resource with ID {0} has no data at path: {1}
 ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted.
 ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1}
 ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2}
 
@@ -127,5 +127,11 @@ ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemU
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
+ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand=ValueSet is not ready for operation $expand; current status: {0} | {1}
 
 ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}
+
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress.
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete.
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.failedToExpand=The ValueSet has been picked up by a scheduled task and pre-expansion has failed.
@@ -90,28 +90,12 @@
 			<artifactId>commons-cli</artifactId>
 		</dependency>
 
-		<!-- This example uses Derby embedded database. If you are using another database such as Mysql or Oracle, you may omit the following dependencies and replace them with an appropriate database client
+		<!-- This example uses H2 embedded database. If you are using another database such as Mysql or Oracle, you may omit the following dependencies and replace them with an appropriate database client
 			dependency for your database platform. -->
 		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derby</artifactId>
+			<groupId>com.h2database</groupId>
+			<artifactId>h2</artifactId>
 		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbynet</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbyclient</artifactId>
-		</dependency>
-		<!--<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbyshared</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbytools</artifactId>
-		</dependency>-->
 
 		<!-- The following dependencies are only needed for automated unit tests, you do not neccesarily need them to run the example. -->
 		<dependency>
@@ -1,17 +1,16 @@
 package ca.uhn.fhir.jpa.demo;
 
-import java.util.Properties;
-
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
-import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.time.DateUtils;
+import org.hibernate.dialect.H2Dialect;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 import javax.sql.DataSource;
+import java.util.Properties;
 
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
@@ -38,20 +37,20 @@ public class CommonConfig {
 	}
 
 	/**
-	 * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
-	 * directory called "jpaserver_derby_files".
+	 * The following bean configures the database connection. The 'url' property value of "jdbc:h2:file:target./jpaserver_h2_files" indicates that the server should save resources in a
+	 * directory called "jpaserver_h2_files".
 	 *
 	 * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
 	 */
 	@Bean(destroyMethod = "close")
 	public DataSource dataSource() {
-		String url = "jdbc:derby:directory:target/jpaserver_derby_files;create=true";
+		String url = "jdbc:h2:file:./target/jpaserver_h2_files";
 		if (isNotBlank(ContextHolder.getDatabaseUrl())) {
 			url = ContextHolder.getDatabaseUrl();
 		}
 
 		BasicDataSource retVal = new BasicDataSource();
-		retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
+		retVal.setDriver(new org.h2.Driver());
 		retVal.setUrl(url);
 		retVal.setUsername("");
 		retVal.setPassword("");
@@ -61,7 +60,7 @@ public class CommonConfig {
 	@Bean
 	public Properties jpaProperties() {
 		Properties extraProperties = new Properties();
-		extraProperties.put("hibernate.dialect", DerbyTenSevenHapiFhirDialect.class.getName());
+		extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
 		extraProperties.put("hibernate.format_sql", "true");
 		extraProperties.put("hibernate.show_sql", "false");
 		extraProperties.put("hibernate.hbm2ddl.auto", "update");
@@ -146,7 +146,11 @@
 			<artifactId>logback-classic</artifactId>
 			<scope>test</scope>
 		</dependency>
 
+		<dependency>
+			<groupId>org.awaitility</groupId>
+			<artifactId>awaitility</artifactId>
+			<scope>test</scope>
+		</dependency>
 		<dependency>
 			<groupId>org.javassist</groupId>
 			<artifactId>javassist</artifactId>
@@ -152,6 +152,18 @@ public class DaoConfig {
 	private boolean myPreExpandValueSetsExperimental = false;
 	private boolean myFilterParameterEnabled = false;
 	private StoreMetaSourceInformation myStoreMetaSourceInformation = StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID;
+	/**
+	 * EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}!
+	 */
+	private int myPreExpandValueSetsDefaultOffsetExperimental = 0;
+	/**
+	 * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}!
+	 */
+	private int myPreExpandValueSetsDefaultCountExperimental = 1000;
+	/**
+	 * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}!
+	 */
+	private int myPreExpandValueSetsMaxCountExperimental = 1000;
 
 	/**
 	 * Constructor
@@ -1707,6 +1719,86 @@ public class DaoConfig {
 		}
 	}
 
+	/**
+	 * EXPERIMENTAL - Do not use in production!
+	 * <p>
+	 * This is the default value of {@code offset} parameter for the ValueSet {@code $expand} operation when
+	 * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
+	 * </p>
+	 * <p>
+	 * The default value for this setting is {@code 0}.
+	 * </p>
+	 */
+	public int getPreExpandValueSetsDefaultOffsetExperimental() {
+		return myPreExpandValueSetsDefaultOffsetExperimental;
+	}
+
+	/**
+	 * EXPERIMENTAL - Do not use in production!
+	 * <p>
+	 * This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when
+	 * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
+	 * </p>
+	 * <p>
+	 * The default value for this setting is {@code 1000}.
+	 * </p>
+	 */
+	public int getPreExpandValueSetsDefaultCountExperimental() {
+		return myPreExpandValueSetsDefaultCountExperimental;
+	}
+
+	/**
+	 * EXPERIMENTAL - Do not use in production!
+	 * <p>
+	 * This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when
+	 * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
+	 * </p>
+	 * <p>
+	 * If {@code thePreExpandValueSetsDefaultCountExperimental} is greater than
+	 * {@link DaoConfig#getPreExpandValueSetsMaxCountExperimental()}, the lesser value is used.
+	 * </p>
+	 * <p>
+	 * The default value for this setting is {@code 1000}.
+	 * </p>
+	 */
+	public void setPreExpandValueSetsDefaultCountExperimental(int thePreExpandValueSetsDefaultCountExperimental) {
+		myPreExpandValueSetsDefaultCountExperimental = Math.min(thePreExpandValueSetsDefaultCountExperimental, getPreExpandValueSetsMaxCountExperimental());
+	}
+
+	/**
+	 * EXPERIMENTAL - Do not use in production!
+	 * <p>
+	 * This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when
+	 * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
+	 * </p>
+	 * <p>
+	 * The default value for this setting is {@code 1000}.
+	 * </p>
+	 */
+	public int getPreExpandValueSetsMaxCountExperimental() {
+		return myPreExpandValueSetsMaxCountExperimental;
+	}
+
+	/**
+	 * EXPERIMENTAL - Do not use in production!
+	 * <p>
+	 * This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when
+	 * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
+	 * </p>
+	 * <p>
+	 * If {@code thePreExpandValueSetsMaxCountExperimental} is lesser than
+	 * {@link DaoConfig#getPreExpandValueSetsDefaultCountExperimental()}, the default {@code count} is lowered to the
+	 * new max {@code count}.
+	 * </p>
+	 * <p>
+	 * The default value for this setting is {@code 1000}.
+	 * </p>
+	 */
+	public void setPreExpandValueSetsMaxCountExperimental(int thePreExpandValueSetsMaxCountExperimental) {
+		myPreExpandValueSetsMaxCountExperimental = thePreExpandValueSetsMaxCountExperimental;
+		setPreExpandValueSetsDefaultCountExperimental(Math.min(getPreExpandValueSetsDefaultCountExperimental(), getPreExpandValueSetsMaxCountExperimental()));
+	}
+
 	public enum IndexEnabledEnum {
 		ENABLED,
 		DISABLED
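As an illustration of how the three new settings interact (a sketch, not part of this commit; it assumes a setPreExpandValueSetsExperimental() setter exists for the myPreExpandValueSetsExperimental flag shown above):

	// Hypothetical configuration of the experimental pre-expansion settings.
	// The clamping comments mirror the setters' Math.min() logic above.
	DaoConfig daoConfig = new DaoConfig();
	daoConfig.setPreExpandValueSetsExperimental(true);              // assumed setter for the flag above
	daoConfig.setPreExpandValueSetsDefaultCountExperimental(2000);  // clamped to the current max, so stored as 1000
	daoConfig.setPreExpandValueSetsMaxCountExperimental(500);       // also lowers the default count down to 500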
@@ -20,15 +20,25 @@ package ca.uhn.fhir.jpa.dao;
  * #L%
  */
 
-import static org.apache.commons.lang3.StringUtils.isBlank;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
-import java.util.*;
-
-import javax.annotation.PostConstruct;
-
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.support.IContextValidationSupport;
+import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt;
+import ca.uhn.fhir.model.dstu2.composite.CodingDt;
+import ca.uhn.fhir.model.dstu2.resource.ValueSet;
+import ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept;
+import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeInclude;
+import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeIncludeConcept;
+import ca.uhn.fhir.model.dstu2.resource.ValueSet.ExpansionContains;
+import ca.uhn.fhir.model.primitive.DateTimeDt;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.param.UriParam;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import org.apache.commons.codec.binary.StringUtils;
 import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
 import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
@@ -38,20 +48,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
-import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt;
-import ca.uhn.fhir.model.dstu2.composite.CodingDt;
-import ca.uhn.fhir.model.dstu2.resource.ValueSet;
-import ca.uhn.fhir.model.dstu2.resource.ValueSet.*;
-import ca.uhn.fhir.model.primitive.DateTimeDt;
-import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.server.IBundleProvider;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.param.TokenParam;
-import ca.uhn.fhir.rest.param.UriParam;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import javax.annotation.PostConstruct;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
 	implements IFhirResourceDaoValueSet<ValueSet, CodingDt, CodeableConceptDt>, IFhirResourceDaoCodeSystem<ValueSet, CodingDt, CodeableConceptDt> {
@@ -95,7 +99,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
 	public ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequest) {
 		ValueSet source = loadValueSetForExpansion(theId, theRequest);
 		return expand(source, theFilter);
 	}
 
+	@Override
+	public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequest) {
+		throw new UnsupportedOperationException();
+	}
+
 	@Override
@@ -131,6 +139,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
 		return retVal;
 	}
 
+	@Override
+	public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) {
+		throw new UnsupportedOperationException();
+	}
+
 	@Override
 	public ValueSet expandByIdentifier(String theUri, String theFilter) {
 		if (isBlank(theUri)) {
@@ -153,7 +166,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
 		}
 
 		return expand(source, theFilter);
 	}
 
+	@Override
+	public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) {
+		throw new UnsupportedOperationException();
+	}
+
 	@Override
@@ -27,15 +27,21 @@ public interface IFhirResourceDaoValueSet<T extends IBaseResource, CD, CC> exten
 
 	T expand(IIdType theId, String theFilter, RequestDetails theRequestDetails);
 
+	T expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails);
+
 	T expand(T theSource, String theFilter);
 
+	T expand(T theSource, String theFilter, int theOffset, int theCount);
+
 	T expandByIdentifier(String theUri, String theFilter);
 
+	T expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount);
+
 	void purgeCaches();
 
 	ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode, IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, CD theCoding, CC theCodeableConcept, RequestDetails theRequestDetails);
 
-	public class ValidateCodeResult {
+	class ValidateCodeResult {
 		private String myDisplay;
 		private String myMessage;
 		private boolean myResult;
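As an illustration of the new paged overloads (a sketch, not part of this commit; the ValueSet URL and the injected DAO reference are hypothetical, and the DSTU2 implementation above simply throws UnsupportedOperationException):

	// Hypothetical caller paging through an expansion 100 codes at a time
	IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> valueSetDao = myValueSetDao; // assumed injected DAO
	ValueSet firstPage = valueSetDao.expandByIdentifier(
		"http://example.com/fhir/ValueSet/example", // hypothetical ValueSet.url
		null, // no display filter
		0,    // offset: skip no codes
		100); // count: return at most 100 codes, subject to the DaoConfig max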
@@ -21,20 +21,34 @@ package ca.uhn.fhir.jpa.dao.data;
  */
 
 import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
 import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.query.Param;
 
+import java.util.List;
 import java.util.Optional;
 
 public interface ITermValueSetConceptDao extends JpaRepository<TermValueSetConcept, Long> {
 
+	@Query("SELECT COUNT(*) FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid")
+	Integer countByTermValueSetId(@Param("pid") Long theValueSetId);
+
 	@Query("DELETE FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid")
 	@Modifying
 	void deleteByTermValueSetId(@Param("pid") Long theValueSetId);
 
-	@Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval")
-	Optional<TermValueSetConcept> findByValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode);
+	@Query("SELECT vsc from TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid")
+	Slice<TermValueSetConcept> findByTermValueSetId(Pageable thePage, @Param("pid") Long theValueSetId);
+
+	@Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval")
+	Optional<TermValueSetConcept> findByTermValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode);
+
+	@Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.myCode = :codeval")
+	List<TermValueSetConcept> findOneByValueSetIdAndCode(@Param("resource_pid") Long theValueSetId, @Param("codeval") String theCode);
+
+	@Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval")
+	List<TermValueSetConcept> findOneByValueSetIdSystemAndCode(@Param("resource_pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode);
 }
@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.dao.data;
  */
 
 import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
 import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
@@ -28,8 +30,13 @@ import org.springframework.data.repository.query.Param;
 
 public interface ITermValueSetConceptDesignationDao extends JpaRepository<TermValueSetConceptDesignation, Long> {
 
-	@Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myValueSet.myId = :pid")
+	@Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid")
+	Integer countByTermValueSetId(@Param("pid") Long theValueSetId);
+
+	@Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid")
 	@Modifying
 	void deleteByTermValueSetId(@Param("pid") Long theValueSetId);
 
+	@Query("SELECT vscd FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid")
+	Slice<TermValueSetConceptDesignation> findByTermValueSetConceptId(Pageable thePage, @Param("pid") Long theValueSetConceptId);
 }
@@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.dao.data;
  */
 
 import ca.uhn.fhir.jpa.entity.TermValueSet;
-import ca.uhn.fhir.jpa.entity.TermValueSetExpansionStatusEnum;
-import org.springframework.data.domain.Page;
+import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
 import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
 import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
@@ -44,6 +44,6 @@ public interface ITermValueSetDao extends JpaRepository<TermValueSet, Long> {
 	Optional<TermValueSet> findByUrl(@Param("url") String theUrl);
 
 	@Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status")
-	Page<TermValueSet> findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus);
+	Slice<TermValueSet> findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus);
 
 }
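A sketch of why the return type moves from Page to Slice (not part of this commit): the scheduled pre-expansion task only needs "fetch the next batch", so Slice avoids the extra COUNT query that Page issues. The enum constant and the loop body below are assumptions for illustration:

	// Hypothetical scheduled task draining ValueSets that still await pre-expansion
	Pageable page = PageRequest.of(0, 1000);
	Slice<TermValueSet> slice;
	do {
		slice = myTermValueSetDao.findByExpansionStatus(page, TermValueSetPreExpansionStatusEnum.NOT_EXPANDED); // assumed constant name
		for (TermValueSet valueSet : slice.getContent()) {
			// pre-expand valueSet here, then mark it EXPANDED
		}
		page = page.next();
	} while (slice.hasNext());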
@@ -75,6 +75,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		return expand(source, theFilter);
 	}
 
+	@Override
+	public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) {
+		ValueSet source = read(theId, theRequestDetails);
+		return expand(source, theFilter, theOffset, theCount);
+	}
+
 	private ValueSet doExpand(ValueSet theSource) {
 
 		validateIncludes("include", theSource.getCompose().getInclude());
@@ -105,7 +111,38 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		ValueSet retVal = outcome.getValueset();
 		retVal.setStatus(PublicationStatus.ACTIVE);
 		return retVal;
 	}
 
+	private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) {
+
+		validateIncludes("include", theSource.getCompose().getInclude());
+		validateIncludes("exclude", theSource.getCompose().getExclude());
+
+		/*
+		 * If all of the code systems are supported by the HAPI FHIR terminology service, let's
+		 * use that as it's more efficient.
+		 */
+
+		boolean allSystemsAreSuppportedByTerminologyService = true;
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		if (allSystemsAreSuppportedByTerminologyService) {
+			return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount);
+		}
+
+		HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
+		ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null);
+		ValueSet retVal = outcome.getValueset();
+		retVal.setStatus(PublicationStatus.ACTIVE);
+		return retVal;
+	}
+
 	private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
@@ -148,20 +185,42 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		// }
 		//
 		// return expand(defaultValueSet, theFilter);
 
 	}
 
 	@Override
-	public ValueSet expand(ValueSet source, String theFilter) {
+	public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) {
+		if (isBlank(theUri)) {
+			throw new InvalidRequestException("URI must not be blank or missing");
+		}
+
+		ValueSet source = new ValueSet();
+		source.setUrl(theUri);
+
+		source.getCompose().addInclude().addValueSet(theUri);
+
+		if (isNotBlank(theFilter)) {
+			ConceptSetComponent include = source.getCompose().addInclude();
+			ConceptSetFilterComponent filter = include.addFilter();
+			filter.setProperty("display");
+			filter.setOp(FilterOperator.EQUAL);
+			filter.setValue(theFilter);
+		}
+
+		ValueSet retVal = doExpand(source, theOffset, theCount);
+		return retVal;
+	}
+
+	@Override
+	public ValueSet expand(ValueSet theSource, String theFilter) {
 		ValueSet toExpand = new ValueSet();
 
-		// for (UriType next : source.getCompose().getInclude()) {
+		// for (UriType next : theSource.getCompose().getInclude()) {
 		//	ConceptSetComponent include = toExpand.getCompose().addInclude();
 		//	include.setSystem(next.getValue());
 		//	addFilterIfPresent(theFilter, include);
 		// }
 
-		for (ConceptSetComponent next : source.getCompose().getInclude()) {
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
 			toExpand.getCompose().addInclude(next);
 			addFilterIfPresent(theFilter, next);
 		}
@@ -170,7 +229,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 			throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
 		}
 
-		toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude());
+		toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
 
 		ValueSet retVal = doExpand(toExpand);
 
@@ -179,7 +238,32 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		}
 
 		return retVal;
 	}
 
+	@Override
+	public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) {
+		ValueSet toExpand = new ValueSet();
+		toExpand.setId(theSource.getId());
+		toExpand.setUrl(theSource.getUrl());
+
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			toExpand.getCompose().addInclude(next);
+			addFilterIfPresent(theFilter, next);
+		}
+
+		if (toExpand.getCompose().isEmpty()) {
+			throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
+		}
+
+		toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
+
+		ValueSet retVal = doExpand(toExpand, theOffset, theCount);
+
+		if (isNotBlank(theFilter)) {
+			applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter);
+		}
+
+		return retVal;
+	}
+
 	private void applyFilter(IntegerType theTotalElement, List<ValueSetExpansionContainsComponent> theContains, String theFilter) {
@@ -246,10 +330,14 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		}
 
 		if (vs != null) {
-			ValueSet expansion = doExpand(vs);
-			List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
-
-			ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
+			ValidateCodeResult result;
+			if (myDaoConfig.isPreExpandValueSetsExperimental()) {
+				result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept);
+			} else {
+				ValueSet expansion = doExpand(vs);
+				List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
+				result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
+			}
 			if (result != null) {
 				if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) {
 					if (!theDisplay.getValue().equals(result.getDisplay())) {
@@ -269,7 +357,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 	}
 
 	private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
-																																	 Coding theCoding, CodeableConcept theCodeableConcept) {
+		Coding theCoding, CodeableConcept theCodeableConcept) {
 		for (ValueSetExpansionContainsComponent nextCode : contains) {
 			ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
 			if (result != null) {
@@ -21,19 +21,24 @@ package ca.uhn.fhir.jpa.dao.dstu3;
  */
 
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.rest.api.PatchTypeEnum;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import org.hl7.fhir.dstu3.model.Bundle;
 import org.hl7.fhir.dstu3.model.OperationOutcome;
 import org.hl7.fhir.dstu3.model.Resource;
 import org.hl7.fhir.dstu3.model.codesystems.IssueType;
 import org.hl7.fhir.exceptions.FHIRException;
+import org.hl7.fhir.instance.model.api.IBaseBinary;
 import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 
 import java.util.Date;
 import java.util.List;
 
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
 public class TransactionProcessorVersionAdapterDstu3 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.BundleEntryComponent> {
 	@Override
 	public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) {
@@ -106,6 +111,22 @@ public class TransactionProcessorVersionAdapterDstu3 implements TransactionProce
 		if (value != null) {
 			retVal = value.toCode();
 		}
+
+		/*
+		 * This is a workaround for the fact that PATCH isn't a valid constant for
+		 * DSTU3 Bundle.entry.request.method (it was added in R4)
+		 */
+		if (isBlank(retVal)) {
+			if (theEntry.getResource() instanceof IBaseBinary) {
+				String contentType = ((IBaseBinary) theEntry.getResource()).getContentType();
+				try {
+					PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(contentType);
+					retVal = "PATCH";
+				} catch (InvalidRequestException e) {
+					// ignore
+				}
+			}
+		}
 		return retVal;
 	}
 
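A sketch of the kind of DSTU3 transaction entry the workaround above detects (an illustration, not part of this commit; the patch document and target URL are hypothetical). PATCH is not a legal Bundle.entry.request.method code in DSTU3, so the patch travels as a Binary resource whose contentType is a patch media type:

	// Hypothetical construction of a DSTU3 transaction entry carrying a JSON patch
	Binary patch = new Binary();
	patch.setContentType("application/json-patch+json"); // a content type PatchTypeEnum recognizes
	patch.setContent("[{\"op\":\"replace\",\"path\":\"/active\",\"value\":false}]".getBytes(StandardCharsets.UTF_8));

	Bundle bundle = new Bundle();
	bundle.setType(Bundle.BundleType.TRANSACTION);
	bundle.addEntry()
		.setResource(patch)
		.getRequest()
		.setUrl("Patient/123"); // no method set; the adapter infers PATCH from the Binary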
@@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 		return expand(source, theFilter);
 	}
 
+	@Override
+	public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) {
+		ValueSet source = read(theId, theRequestDetails);
+		return expand(source, theFilter, theOffset, theCount);
+	}
+
 	private ValueSet doExpand(ValueSet theSource) {
 
 		/*
@@ -109,6 +115,32 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 		// return retVal;
 	}
 
+	private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) {
+		boolean allSystemsAreSuppportedByTerminologyService = true;
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
+			if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		if (allSystemsAreSuppportedByTerminologyService) {
+			return myTerminologySvc.expandValueSet(theSource, theOffset, theCount);
+		}
+
+		HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
+
+		ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null);
+
+		ValueSet retVal = outcome.getValueset();
+		retVal.setStatus(PublicationStatus.ACTIVE);
+
+		return retVal;
+	}
+
 	private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
 		for (ConceptSetComponent nextExclude : listToValidate) {
 			if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) {
@@ -149,20 +181,42 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 		// }
 		//
 		// return expand(defaultValueSet, theFilter);
 
 	}
 
 	@Override
-	public ValueSet expand(ValueSet source, String theFilter) {
+	public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) {
+		if (isBlank(theUri)) {
+			throw new InvalidRequestException("URI must not be blank or missing");
+		}
+
+		ValueSet source = new ValueSet();
+		source.setUrl(theUri);
+
+		source.getCompose().addInclude().addValueSet(theUri);
+
+		if (isNotBlank(theFilter)) {
+			ConceptSetComponent include = source.getCompose().addInclude();
+			ConceptSetFilterComponent filter = include.addFilter();
+			filter.setProperty("display");
+			filter.setOp(FilterOperator.EQUAL);
+			filter.setValue(theFilter);
+		}
+
+		ValueSet retVal = doExpand(source, theOffset, theCount);
+		return retVal;
+	}
+
+	@Override
+	public ValueSet expand(ValueSet theSource, String theFilter) {
 		ValueSet toExpand = new ValueSet();
 
-		// for (UriType next : source.getCompose().getInclude()) {
+		// for (UriType next : theSource.getCompose().getInclude()) {
 		//	ConceptSetComponent include = toExpand.getCompose().addInclude();
 		//	include.setSystem(next.getValue());
 		//	addFilterIfPresent(theFilter, include);
 		// }
 
-		for (ConceptSetComponent next : source.getCompose().getInclude()) {
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
 			toExpand.getCompose().addInclude(next);
 			addFilterIfPresent(theFilter, next);
 		}
@@ -171,7 +225,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 			throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
 		}
 
-		toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude());
+		toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
 
 		ValueSet retVal = doExpand(toExpand);
 
@@ -180,7 +234,32 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 		}
 
 		return retVal;
 	}
 
+	@Override
+	public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) {
+		ValueSet toExpand = new ValueSet();
+		toExpand.setId(theSource.getId());
+		toExpand.setUrl(theSource.getUrl());
+
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			toExpand.getCompose().addInclude(next);
+			addFilterIfPresent(theFilter, next);
+		}
+
+		if (toExpand.getCompose().isEmpty()) {
+			throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
+		}
+
+		toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
+
+		ValueSet retVal = doExpand(toExpand, theOffset, theCount);
+
+		if (isNotBlank(theFilter)) {
+			applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter);
+		}
+
+		return retVal;
+	}
+
 	private void applyFilter(IntegerType theTotalElement, List<ValueSetExpansionContainsComponent> theContains, String theFilter) {
@@ -207,15 +286,15 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 	}
 
 	@Override
-	public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
+	public ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
 		IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding,
 		CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
 
 		List<IIdType> valueSetIds = Collections.emptyList();
 
 		boolean haveCodeableConcept = theCodeableConcept != null && theCodeableConcept.getCoding().size() > 0;
-		boolean haveCoding = theCoding != null && theCoding.isEmpty() == false;
-		boolean haveCode = theCode != null && theCode.isEmpty() == false;
+		boolean haveCoding = theCoding != null && !theCoding.isEmpty();
+		boolean haveCode = theCode != null && !theCode.isEmpty();
 
 		if (!haveCodeableConcept && !haveCoding && !haveCode) {
 			throw new InvalidRequestException("No code, coding, or codeableConcept provided to validate");
@@ -224,7 +303,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 			throw new InvalidRequestException("$validate-code can only validate (system AND code) OR (coding) OR (codeableConcept)");
 		}
 
-		boolean haveIdentifierParam = theValueSetIdentifier != null && theValueSetIdentifier.isEmpty() == false;
+		boolean haveIdentifierParam = theValueSetIdentifier != null && !theValueSetIdentifier.isEmpty();
 		ValueSet vs = null;
 		if (theId != null) {
 			vs = read(theId, theRequestDetails);
@@ -241,15 +320,20 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 			// String system = toStringOrNull(theSystem);
 			IContextValidationSupport.LookupCodeResult result = myCodeSystemDao.lookupCode(theCode, theSystem, null, null);
 			if (result.isFound()) {
-				ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult retVal = new ValidateCodeResult(true, "Found code", result.getCodeDisplay());
+				ValidateCodeResult retVal = new ValidateCodeResult(true, "Found code", result.getCodeDisplay());
 				return retVal;
 			}
 		}
 
 		if (vs != null) {
-			ValueSet expansion = doExpand(vs);
-			List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
-			ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
+			ValidateCodeResult result;
+			if (myDaoConfig.isPreExpandValueSetsExperimental()) {
+				result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept);
+			} else {
+				ValueSet expansion = doExpand(vs);
+				List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
+				result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
+			}
 			if (result != null) {
 				if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) {
 					if (!theDisplay.getValue().equals(result.getDisplay())) {
@@ -268,10 +352,10 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 		return thePrimitive != null ? thePrimitive.getValue() : null;
 	}
 
-	private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
+	private ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
 		Coding theCoding, CodeableConcept theCodeableConcept) {
 		for (ValueSetExpansionContainsComponent nextCode : contains) {
-			ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
+			ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
 			if (result != null) {
 				return result;
 			}
@@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
 		return expand(source, theFilter);
 	}
 
+	@Override
+	public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) {
+		ValueSet source = read(theId, theRequestDetails);
+		return expand(source, theFilter, theOffset, theCount);
+	}
+
 	private ValueSet doExpand(ValueSet theSource) {
 
 		/*
@@ -109,6 +115,38 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
 		// return retVal;
 	}
 
+	private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) {
+
+		/*
+		 * If all of the code systems are supported by the HAPI FHIR terminology service, let's
+		 * use that as it's more efficient.
+		 */
+
+		boolean allSystemsAreSuppportedByTerminologyService = true;
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
+			if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		if (allSystemsAreSuppportedByTerminologyService) {
+			return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount);
+		}
+
+		HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
+
+		ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null);
+
+		ValueSet retVal = outcome.getValueset();
+		retVal.setStatus(PublicationStatus.ACTIVE);
+
+		return retVal;
+	}
+
 	private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
 		for (ConceptSetComponent nextExclude : listToValidate) {
 			if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) {
@@ -149,20 +187,42 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
 		// }
 		//
 		// return expand(defaultValueSet, theFilter);
 
 	}
 
 	@Override
-	public ValueSet expand(ValueSet source, String theFilter) {
+	public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) {
+		if (isBlank(theUri)) {
+			throw new InvalidRequestException("URI must not be blank or missing");
+		}
+
+		ValueSet source = new ValueSet();
+		source.setUrl(theUri);
+
+		source.getCompose().addInclude().addValueSet(theUri);
+
+		if (isNotBlank(theFilter)) {
+			ConceptSetComponent include = source.getCompose().addInclude();
+			ConceptSetFilterComponent filter = include.addFilter();
+			filter.setProperty("display");
+			filter.setOp(FilterOperator.EQUAL);
+			filter.setValue(theFilter);
+		}
+
+		ValueSet retVal = doExpand(source, theOffset, theCount);
+		return retVal;
+	}
+
+	@Override
+	public ValueSet expand(ValueSet theSource, String theFilter) {
 		ValueSet toExpand = new ValueSet();
 
-		// for (UriType next : source.getCompose().getInclude()) {
+		// for (UriType next : theSource.getCompose().getInclude()) {
 		//	ConceptSetComponent include = toExpand.getCompose().addInclude();
 		//	include.setSystem(next.getValue());
 		//	addFilterIfPresent(theFilter, include);
 		// }
 
-		for (ConceptSetComponent next : source.getCompose().getInclude()) {
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
 			toExpand.getCompose().addInclude(next);
 			addFilterIfPresent(theFilter, next);
 		}
@ -171,7 +231,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
|
|||
throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
|
||||
}
|
||||
|
||||
toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude());
|
||||
toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
|
||||
|
||||
ValueSet retVal = doExpand(toExpand);
|
||||
|
||||
|
@ -180,7 +240,32 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
|
|||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) {
|
||||
ValueSet toExpand = new ValueSet();
|
||||
toExpand.setId(theSource.getId());
|
||||
toExpand.setUrl(theSource.getUrl());
|
||||
|
||||
for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
|
||||
toExpand.getCompose().addInclude(next);
|
||||
addFilterIfPresent(theFilter, next);
|
||||
}
|
||||
|
||||
if (toExpand.getCompose().isEmpty()) {
|
||||
throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand");
|
||||
}
|
||||
|
||||
toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude());
|
||||
|
||||
ValueSet retVal = doExpand(toExpand, theOffset, theCount);
|
||||
|
||||
if (isNotBlank(theFilter)) {
|
||||
applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private void applyFilter(IntegerType theTotalElement, List<ValueSetExpansionContainsComponent> theContains, String theFilter) {
|
||||
|
@ -247,9 +332,14 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
|
|||
}
|
||||
|
||||
if (vs != null) {
|
||||
ValueSet expansion = doExpand(vs);
|
||||
List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
|
||||
ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
|
||||
ValidateCodeResult result;
|
||||
if (myDaoConfig.isPreExpandValueSetsExperimental()) {
|
||||
result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept);
|
||||
} else {
|
||||
ValueSet expansion = doExpand(vs);
|
||||
List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
|
||||
result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
|
||||
}
|
||||
if (result != null) {
|
||||
if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) {
|
||||
if (!theDisplay.getValue().equals(result.getDisplay())) {
|
||||
|
|
|
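
A minimal sketch of how a caller might drive the new paged expansion added above (the DAO variable, resource ID, and page size are illustrative, not part of this commit):

	// Expand ValueSet/123 in pages of 100 codes; offset/count map onto the new overloads.
	IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> dao = myValueSetDao; // assumed to be wired elsewhere
	ValueSet firstPage = dao.expand(new IdType("ValueSet/123"), null, 0, 100, theRequestDetails);
	ValueSet secondPage = dao.expand(new IdType("ValueSet/123"), null, 100, 100, theRequestDetails);
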
@@ -71,11 +71,11 @@ public class TermValueSet implements Serializable {
 
 	@Enumerated(EnumType.STRING)
 	@Column(name = "EXPANSION_STATUS", nullable = false, length = MAX_EXPANSION_STATUS_LENGTH)
-	private TermValueSetExpansionStatusEnum myExpansionStatus;
+	private TermValueSetPreExpansionStatusEnum myExpansionStatus;
 
 	public TermValueSet() {
 		super();
-		myExpansionStatus = TermValueSetExpansionStatusEnum.NOT_EXPANDED;
+		myExpansionStatus = TermValueSetPreExpansionStatusEnum.NOT_EXPANDED;
 	}
 
 	public Long getId() {
@@ -120,11 +120,11 @@ public class TermValueSet implements Serializable {
 		return myConcepts;
 	}
 
-	public TermValueSetExpansionStatusEnum getExpansionStatus() {
+	public TermValueSetPreExpansionStatusEnum getExpansionStatus() {
 		return myExpansionStatus;
 	}
 
-	public void setExpansionStatus(TermValueSetExpansionStatusEnum theExpansionStatus) {
+	public void setExpansionStatus(TermValueSetPreExpansionStatusEnum theExpansionStatus) {
 		myExpansionStatus = theExpansionStatus;
 	}
 
@@ -52,6 +52,16 @@ public class TermValueSetConceptDesignation implements Serializable {
 	@JoinColumn(name = "VALUESET_CONCEPT_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_CONCEPT_PID"))
 	private TermValueSetConcept myConcept;
 
+	@ManyToOne()
+	@JoinColumn(name = "VALUESET_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VSCD_VS_PID"))
+	private TermValueSet myValueSet;
+
+	@Transient
+	private String myValueSetUrl;
+
+	@Transient
+	private String myValueSetName;
+
 	@Column(name = "LANG", nullable = true, length = MAX_LENGTH)
 	private String myLanguage;
 
@@ -80,6 +90,31 @@ public class TermValueSetConceptDesignation implements Serializable {
 		return this;
 	}
 
+	public TermValueSet getValueSet() {
+		return myValueSet;
+	}
+
+	public TermValueSetConceptDesignation setValueSet(TermValueSet theValueSet) {
+		myValueSet = theValueSet;
+		return this;
+	}
+
+	public String getValueSetUrl() {
+		if (myValueSetUrl == null) {
+			myValueSetUrl = getValueSet().getUrl();
+		}
+
+		return myValueSetUrl;
+	}
+
+	public String getValueSetName() {
+		if (myValueSetName == null) {
+			myValueSetName = getValueSet().getName();
+		}
+
+		return myValueSetName;
+	}
+
 	public String getLanguage() {
 		return myLanguage;
 	}
@@ -167,6 +202,9 @@ public class TermValueSetConceptDesignation implements Serializable {
 		return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
 			.append("myId", myId)
 			.append(myConcept != null ? ("myConcept - id=" + myConcept.getId()) : ("myConcept=(null)"))
+			.append(myValueSet != null ? ("myValueSet - id=" + myValueSet.getId()) : ("myValueSet=(null)"))
+			.append("myValueSetUrl", this.getValueSetUrl())
+			.append("myValueSetName", this.getValueSetName())
 			.append("myLanguage", myLanguage)
 			.append("myUseSystem", myUseSystem)
 			.append("myUseCode", myUseCode)
@@ -1,42 +0,0 @@
-package ca.uhn.fhir.jpa.entity;
-
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2019 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-/**
- * This enum is used to indicate the expansion status of a given ValueSet in the terminology tables. In this context,
- * an expanded ValueSet has its included concepts stored in the terminology tables as well.
- */
-public enum TermValueSetExpansionStatusEnum {
-
-	/**
-	 * This status indicates the ValueSet is waiting to be picked up and expanded by a scheduled task.
-	 */
-	NOT_EXPANDED,
-	/**
-	 * This status indicates the ValueSet has been picked up by a scheduled task and is mid-expansion.
-	 */
-	EXPANSION_IN_PROGRESS,
-	/**
-	 * This status indicates the ValueSet has been picked up by a scheduled task and expansion is complete.
-	 */
-	EXPANDED
-
-}
@@ -0,0 +1,71 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This enum is used to indicate the pre-expansion status of a given ValueSet in the terminology tables. In this context,
+ * an expanded ValueSet has its included concepts stored in the terminology tables as well.
+ */
+public enum TermValueSetPreExpansionStatusEnum {
+	/**
+	 * Sorting agnostic.
+	 */
+
+	NOT_EXPANDED("notExpanded"),
+	EXPANSION_IN_PROGRESS("expansionInProgress"),
+	EXPANDED("expanded"),
+	FAILED_TO_EXPAND("failedToExpand");
+
+	private static Map<String, TermValueSetPreExpansionStatusEnum> ourValues;
+	private String myCode;
+
+	TermValueSetPreExpansionStatusEnum(String theCode) {
+		myCode = theCode;
+	}
+
+	public String getCode() {
+		return myCode;
+	}
+
+	public static TermValueSetPreExpansionStatusEnum fromCode(String theCode) {
+		if (ourValues == null) {
+			HashMap<String, TermValueSetPreExpansionStatusEnum> values = new HashMap<String, TermValueSetPreExpansionStatusEnum>();
+			for (TermValueSetPreExpansionStatusEnum next : values()) {
+				values.put(next.getCode(), next);
+			}
+			ourValues = Collections.unmodifiableMap(values);
+		}
+		return ourValues.get(theCode);
+	}
+
+	/**
+	 * Convert from Enum ordinal to Enum type.
+	 *
+	 * Usage:
+	 *
+	 * <code>TermValueSetPreExpansionStatusEnum termValueSetPreExpansionStatusEnum = TermValueSetPreExpansionStatusEnum.values[ordinal];</code>
+	 */
+	public static final TermValueSetPreExpansionStatusEnum values[] = values();
+}
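
A short sketch of the round trip the new enum supports, using only members defined above (purely illustrative):

	// Codes survive persistence as strings and can be mapped back to the enum.
	String code = TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND.getCode(); // "failedToExpand"
	TermValueSetPreExpansionStatusEnum status = TermValueSetPreExpansionStatusEnum.fromCode(code);
	assert status == TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND;
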
@@ -1,9 +1,10 @@
 package ca.uhn.fhir.jpa.provider;
 
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.util.ExpungeOptions;
 import ca.uhn.fhir.jpa.util.ExpungeOutcome;
-import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.rest.param.DateRangeParam;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -12,6 +13,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.r4.model.IntegerType;
 import org.hl7.fhir.r4.model.Parameters;
 import org.jboss.logging.MDC;
+import org.springframework.beans.factory.annotation.Autowired;
 
 import javax.servlet.http.HttpServletRequest;
 import java.util.Date;
@@ -42,6 +44,10 @@ import java.util.TreeSet;
 public class BaseJpaProvider {
 	public static final String REMOTE_ADDR = "req.remoteAddr";
 	public static final String REMOTE_UA = "req.userAgent";
 
+	@Autowired
+	protected DaoConfig myDaoConfig;
+
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaProvider.class);
 	private FhirContext myContext;
 
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDstu3<ValueSet> {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetDstu3.class);
 
 	@Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true)
 	public ValueSet expand(
@@ -46,6 +47,8 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst
 		@OperationParam(name = "url", min = 0, max = 1) UriType theUrl,
 		@OperationParam(name = "identifier", min = 0, max = 1) UriType theIdentifier,
 		@OperationParam(name = "filter", min = 0, max = 1) StringType theFilter,
+		@OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset,
+		@OperationParam(name = "count", min = 0, max = 1) IntegerType theCount,
 		RequestDetails theRequestDetails) {
 
 		boolean haveId = theId != null && theId.hasIdPart();
@@ -55,27 +58,59 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst
 		}
 
 		boolean haveIdentifier = url != null && isNotBlank(url.getValue());
-		boolean haveValueSet = theValueSet != null && theValueSet.isEmpty() == false;
+		boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty();
 
 		if (!haveId && !haveIdentifier && !haveValueSet) {
-			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request");
+			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request.");
 		}
 
 		if (moreThanOneTrue(haveId, haveIdentifier, haveValueSet)) {
 			throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.");
 		}
 
+		int offset = myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental();
+		if (theOffset != null && theOffset.hasValue()) {
+			if (theOffset.getValue() >= 0) {
+				offset = theOffset.getValue();
+			} else {
+				throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue());
+			}
+		}
+
+		int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental();
+		if (theCount != null && theCount.hasValue()) {
+			if (theCount.getValue() >= 0) {
+				count = theCount.getValue();
+			} else {
+				throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue());
+			}
+		}
+		int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental();
+		if (count > countMax) {
+			ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax);
+			count = countMax;
+		}
+
 		startRequest(theServletRequest);
 		try {
 			IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> dao = (IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept>) getDao();
-			if (haveId) {
-				return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
-			} else if (haveIdentifier) {
-				return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter));
+			if (myDaoConfig.isPreExpandValueSetsExperimental()) {
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter), offset, count);
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter), offset, count);
+				}
 			} else {
-				return dao.expand(theValueSet, toFilterString(theFilter));
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter));
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter));
+				}
 			}
 
 		} finally {
 			endRequest(theServletRequest);
 		}
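
For reference, a hedged sketch of invoking the operation with the new paging parameters from a HAPI FHIR generic client (base URL and ValueSet URL are placeholders; types come from org.hl7.fhir.dstu3.model and ca.uhn.fhir.rest.client.api):

	FhirContext ctx = FhirContext.forDstu3();
	IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder endpoint
	Parameters inParams = new Parameters();
	inParams.addParameter().setName("url").setValue(new UriType("http://example.org/fhir/ValueSet/example")); // placeholder
	inParams.addParameter().setName("offset").setValue(new IntegerType(0));
	inParams.addParameter().setName("count").setValue(new IntegerType(100));
	ValueSet expanded = client
		.operation()
		.onType(ValueSet.class)
		.named(JpaConstants.OPERATION_EXPAND)
		.withParameters(inParams)
		.returnResourceType(ValueSet.class)
		.execute();
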
@@ -62,7 +62,7 @@ public class BaseJpaResourceProviderConceptMapR4 extends JpaResourceProviderR4<C
 			&& theSourceValueSet.hasValue();
 		boolean haveSourceCoding = theSourceCoding != null
 			&& theSourceCoding.hasCode();
-		boolean haveSourceCodeableConcept= theSourceCodeableConcept != null
+		boolean haveSourceCodeableConcept = theSourceCodeableConcept != null
 			&& theSourceCodeableConcept.hasCoding()
 			&& theSourceCodeableConcept.getCodingFirstRep().hasCode();
 		boolean haveTargetValueSet = theTargetValueSet != null
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4<ValueSet> {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR4.class);
 
 	@Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true)
 	public ValueSet expand(
@@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4<Val
 		@OperationParam(name = "valueSet", min = 0, max = 1) ValueSet theValueSet,
 		@OperationParam(name = "url", min = 0, max = 1) UriType theUrl,
 		@OperationParam(name = "filter", min = 0, max = 1) StringType theFilter,
+		@OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset,
+		@OperationParam(name = "count", min = 0, max = 1) IntegerType theCount,
 		RequestDetails theRequestDetails) {
 
 		boolean haveId = theId != null && theId.hasIdPart();
 		boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue());
-		boolean haveValueSet = theValueSet != null && theValueSet.isEmpty() == false;
+		boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty();
 
 		if (!haveId && !haveIdentifier && !haveValueSet) {
-			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request");
+			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.");
 		}
 
 		if (moreThanOneTrue(haveId, haveIdentifier, haveValueSet)) {
 			throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.");
 		}
 
+		int offset = myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental();
+		if (theOffset != null && theOffset.hasValue()) {
+			if (theOffset.getValue() >= 0) {
+				offset = theOffset.getValue();
+			} else {
+				throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue());
+			}
+		}
+
+		int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental();
+		if (theCount != null && theCount.hasValue()) {
+			if (theCount.getValue() >= 0) {
+				count = theCount.getValue();
+			} else {
+				throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue());
+			}
+		}
+		int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental();
+		if (count > countMax) {
+			ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax);
+			count = countMax;
+		}
+
 		startRequest(theServletRequest);
 		try {
 			IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> dao = (IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept>) getDao();
-			if (haveId) {
-				return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
-			} else if (haveIdentifier) {
-				return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter));
+			if (myDaoConfig.isPreExpandValueSetsExperimental()) {
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count);
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter), offset, count);
+				}
 			} else {
-				return dao.expand(theValueSet, toFilterString(theFilter));
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter));
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter));
+				}
 			}
 
 		} finally {
 			endRequest(theServletRequest);
 		}
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5<ValueSet> {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR5.class);
 
 	@Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true)
 	public ValueSet expand(
@@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5<Val
 		@OperationParam(name = "valueSet", min = 0, max = 1) ValueSet theValueSet,
 		@OperationParam(name = "url", min = 0, max = 1) UriType theUrl,
 		@OperationParam(name = "filter", min = 0, max = 1) StringType theFilter,
+		@OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset,
+		@OperationParam(name = "count", min = 0, max = 1) IntegerType theCount,
 		RequestDetails theRequestDetails) {
 
 		boolean haveId = theId != null && theId.hasIdPart();
 		boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue());
-		boolean haveValueSet = theValueSet != null && theValueSet.isEmpty() == false;
+		boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty();
 
 		if (!haveId && !haveIdentifier && !haveValueSet) {
-			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request");
+			throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.");
 		}
 
 		if (moreThanOneTrue(haveId, haveIdentifier, haveValueSet)) {
 			throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.");
 		}
 
+		int offset = myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental();
+		if (theOffset != null && theOffset.hasValue()) {
+			if (theOffset.getValue() >= 0) {
+				offset = theOffset.getValue();
+			} else {
+				throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue());
+			}
+		}
+
+		int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental();
+		if (theCount != null && theCount.hasValue()) {
+			if (theCount.getValue() >= 0) {
+				count = theCount.getValue();
+			} else {
+				throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue());
+			}
+		}
+		int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental();
+		if (count > countMax) {
+			ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax);
+			count = countMax;
+		}
+
 		startRequest(theServletRequest);
 		try {
 			IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> dao = (IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept>) getDao();
-			if (haveId) {
-				return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
-			} else if (haveIdentifier) {
-				return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter));
+			if (myDaoConfig.isPreExpandValueSetsExperimental()) {
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count);
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter), offset, count);
+				}
			} else {
-				return dao.expand(theValueSet, toFilterString(theFilter));
+				if (haveId) {
+					return dao.expand(theId, toFilterString(theFilter), theRequestDetails);
+				} else if (haveIdentifier) {
+					return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter));
+				} else {
+					return dao.expand(theValueSet, toFilterString(theFilter));
+				}
 			}
 
 		} finally {
 			endRequest(theServletRequest);
 		}
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.support.IContextValidationSupport;
 import ca.uhn.fhir.jpa.dao.*;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
 import ca.uhn.fhir.jpa.dao.data.*;
 import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
@@ -351,7 +352,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		if (optionalExistingTermConceptMapById.isPresent()) {
 			TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
 
-			ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
+			ourLog.info("Deleting existing TermConceptMap[{}] and its children...", existingTermConceptMap.getId());
 			for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
 
 				for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
@@ -368,7 +369,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 			}
 
 			myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
-			ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
+			ourLog.info("Done deleting existing TermConceptMap[{}] and its children.", existingTermConceptMap.getId());
 
 			ourLog.info("Flushing...");
 			myConceptMapGroupElementTargetDao.flush();
@@ -392,11 +393,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		if (optionalExistingTermValueSetById.isPresent()) {
 			TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get();
 
-			ourLog.info("Deleting existing TermValueSet {} and its children...", existingTermValueSet.getId());
+			ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId());
 			myValueSetConceptDesignationDao.deleteByTermValueSetId(existingTermValueSet.getId());
 			myValueSetConceptDao.deleteByTermValueSetId(existingTermValueSet.getId());
 			myValueSetDao.deleteByTermValueSetId(existingTermValueSet.getId());
-			ourLog.info("Done deleting existing TermValueSet {} and its children.", existingTermValueSet.getId());
+			ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId());
 
 			ourLog.info("Flushing...");
 			myValueSetConceptDesignationDao.flush();
@@ -420,7 +421,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		count = 0;
 		while (true) {
 			Slice<T> link = theLoader.get();
-			if (link.hasContent() == false) {
+			if (!link.hasContent()) {
 				break;
 			}
 
@@ -478,28 +479,221 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		return valueSet;
 	}
 
+	@Override
+	@Transactional(propagation = Propagation.REQUIRED)
+	public ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount) {
+		ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null");
+
+		Optional<TermValueSet> optionalTermValueSet;
+		if (theValueSetToExpand.hasId()) {
+			optionalTermValueSet = myValueSetDao.findByResourcePid(theValueSetToExpand.getIdElement().getIdPartAsLong());
+		} else if (theValueSetToExpand.hasUrl()) {
+			optionalTermValueSet = myValueSetDao.findByUrl(theValueSetToExpand.getUrl());
+		} else {
+			throw new UnprocessableEntityException("ValueSet to be expanded must provide either ValueSet.id or ValueSet.url");
+		}
+
+		if (!optionalTermValueSet.isPresent()) {
+			throw new InvalidRequestException("ValueSet is not present in terminology tables: " + theValueSetToExpand.getUrl());
+		}
+
+		TermValueSet termValueSet = optionalTermValueSet.get();
+
+		validatePreExpansionStatusOfValueSetOrThrowException(termValueSet.getExpansionStatus());
+
+		ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
+		expansionComponent.setIdentifier(UUID.randomUUID().toString());
+		expansionComponent.setTimestamp(new Date());
+
+		populateExpansionComponent(expansionComponent, termValueSet, theOffset, theCount);
+
+		ValueSet valueSet = new ValueSet();
+		valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE);
+		valueSet.setCompose(theValueSetToExpand.getCompose());
+		valueSet.setExpansion(expansionComponent);
+		return valueSet;
+	}
+
+	private void validatePreExpansionStatusOfValueSetOrThrowException(TermValueSetPreExpansionStatusEnum thePreExpansionStatus) {
+		if (TermValueSetPreExpansionStatusEnum.EXPANDED != thePreExpansionStatus) {
+			String statusMsg = myContext.getLocalizer().getMessage(
+				TermValueSetPreExpansionStatusEnum.class,
+				thePreExpansionStatus.getCode());
+			String msg = myContext.getLocalizer().getMessage(
+				BaseHapiTerminologySvcImpl.class,
+				"valueSetNotReadyForExpand",
+				thePreExpansionStatus.name(),
+				statusMsg);
+			throw new UnprocessableEntityException(msg);
+		}
+	}
+
+	private void populateExpansionComponent(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) {
+		int total = myValueSetConceptDao.countByTermValueSetId(theTermValueSet.getId());
+		theExpansionComponent.setTotal(total);
+		theExpansionComponent.setOffset(theOffset);
+		theExpansionComponent.addParameter().setName("offset").setValue(new IntegerType(theOffset));
+		theExpansionComponent.addParameter().setName("count").setValue(new IntegerType(theCount));
+
+		if (theCount == 0 || total == 0) {
+			return;
+		}
+
+		expandConcepts(theExpansionComponent, theTermValueSet, theOffset, theCount);
+	}
+
+	private void expandConcepts(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) {
+		int conceptsExpanded = 0;
+		for (int i = theOffset; i < (theOffset + theCount); i++) {
+			final int page = i;
+			Supplier<Slice<TermValueSetConcept>> loader = () -> myValueSetConceptDao.findByTermValueSetId(PageRequest.of(page, 1), theTermValueSet.getId());
+
+			Slice<TermValueSetConcept> slice = loader.get();
+			if (!slice.hasContent()) {
+				break;
+			}
+
+			for (TermValueSetConcept concept : slice.getContent()) {
+				ValueSet.ValueSetExpansionContainsComponent containsComponent = theExpansionComponent.addContains();
+				containsComponent.setSystem(concept.getSystem());
+				containsComponent.setCode(concept.getCode());
+				containsComponent.setDisplay(concept.getDisplay());
+
+				// TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to make this optional.
+				expandDesignations(theTermValueSet, concept, containsComponent);
+
+				if (++conceptsExpanded % 250 == 0) {
+					ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl());
+				}
+			}
+
+			if (!slice.hasNext()) {
+				break;
+			}
+		}
+
+		if (conceptsExpanded > 0) {
+			ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl());
+		}
+	}
+
+	private void expandDesignations(TermValueSet theValueSet, TermValueSetConcept theConcept, ValueSet.ValueSetExpansionContainsComponent theContainsComponent) {
+		int designationsExpanded = 0;
+		int index = 0;
+		while (true) {
+			final int page = index++;
+			Supplier<Slice<TermValueSetConceptDesignation>> loader = () -> myValueSetConceptDesignationDao.findByTermValueSetConceptId(PageRequest.of(page, 1000), theConcept.getId());
+
+			Slice<TermValueSetConceptDesignation> slice = loader.get();
+			if (!slice.hasContent()) {
+				break;
+			}
+
+			for (TermValueSetConceptDesignation designation : slice.getContent()) {
+				ValueSet.ConceptReferenceDesignationComponent designationComponent = theContainsComponent.addDesignation();
+				designationComponent.setLanguage(designation.getLanguage());
+				designationComponent.setUse(new Coding(
+					designation.getUseSystem(),
+					designation.getUseCode(),
+					designation.getUseDisplay()));
+				designationComponent.setValue(designation.getValue());
+
+				if (++designationsExpanded % 250 == 0) {
+					ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl());
+				}
+			}
+
+			if (!slice.hasNext()) {
+				break;
+			}
+		}
+
+		if (designationsExpanded > 0) {
+			ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl());
+		}
+	}
+
 	@Override
 	@Transactional(propagation = Propagation.REQUIRED)
 	public void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
+		expandValueSet(theValueSetToExpand, theValueSetCodeAccumulator, new AtomicInteger(0));
+	}
+
+	@SuppressWarnings("ConstantConditions")
+	private void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, AtomicInteger theCodeCounter) {
 		Set<String> addedCodes = new HashSet<>();
 
+		StopWatch sw = new StopWatch();
+		String valueSetInfo = getValueSetInfo(theValueSetToExpand);
+		ourLog.info("Working with {}", valueSetInfo);
+
 		// Handle includes
+		ourLog.debug("Handling includes");
 		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
-			boolean add = true;
-			expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter);
+			for (int i = 0; ; i++) {
+				int finalI = i;
+				Boolean shouldContinue = myTxTemplate.execute(t -> {
+					boolean add = true;
+					return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter, finalI);
+				});
+				if (!shouldContinue) {
+					break;
+				}
+			}
 		}
 
 		// Handle excludes
-		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) {
-			boolean add = false;
-			expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter);
+		ourLog.debug("Handling excludes");
+		for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) {
+			for (int i = 0; ; i++) {
+				int finalI = i;
+				Boolean shouldContinue = myTxTemplate.execute(t -> {
+					boolean add = false;
+					return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter, finalI);
+				});
+				if (!shouldContinue) {
+					break;
+				}
+			}
 		}
+
+		ourLog.info("Done working with {} in {}ms", valueSetInfo, sw.getMillis());
+	}
+
+	private String getValueSetInfo(ValueSet theValueSet) {
+		StringBuilder sb = new StringBuilder();
+		boolean isIdentified = false;
+		sb
+			.append("ValueSet:");
+		if (theValueSet.hasId()) {
+			isIdentified = true;
+			sb
+				.append(" ValueSet.id[")
+				.append(theValueSet.getId())
+				.append("]");
+		}
+		if (theValueSet.hasUrl()) {
+			isIdentified = true;
+			sb
+				.append(" ValueSet.url[")
+				.append(theValueSet.getUrl())
+				.append("]");
+		}
+		if (theValueSet.hasIdentifier()) {
+			isIdentified = true;
+			sb
+				.append(" ValueSet.identifier[")
+				.append(theValueSet.getIdentifierFirstRep().getSystem())
+				.append("|")
+				.append(theValueSet.getIdentifierFirstRep().getValue())
+				.append("]");
+		}
+
+		if (!isIdentified) {
+			sb.append(" None of ValueSet.id, ValueSet.url, and ValueSet.identifier are provided.");
+		}
+
+		return sb.toString();
+	}
 
 	protected List<VersionIndependentConcept> expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) {
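
Note the paging trick in expandConcepts() above: PageRequest.of(page, 1) uses a page size of one, so each Spring Data "page" holds exactly one concept and the $expand offset/count map directly onto page indices. A condensed sketch of the same idiom (valueSetPid, offset, count, and addToExpansion are illustrative):

	// Walk concepts [offset, offset + count) one Slice at a time.
	for (int i = offset; i < offset + count; i++) {
		Slice<TermValueSetConcept> slice = myValueSetConceptDao.findByTermValueSetId(PageRequest.of(i, 1), valueSetPid);
		if (!slice.hasContent()) {
			break; // ran off the end of the stored expansion
		}
		slice.getContent().forEach(concept -> addToExpansion(concept)); // addToExpansion is hypothetical
		if (!slice.hasNext()) {
			break;
		}
	}
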
@@ -513,16 +707,21 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		return retVal;
 	}
 
-	private void expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter) {
+	/**
+	 * @return Returns true if there are potentially more results to process.
+	 */
+	private Boolean expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter, int theQueryIndex) {
 
 		String system = theInclude.getSystem();
 		boolean hasSystem = isNotBlank(system);
 		boolean hasValueSet = theInclude.getValueSet().size() > 0;
 
 		if (hasSystem) {
-			ourLog.info("Starting {} expansion around code system: {}", (theAdd ? "inclusion" : "exclusion"), system);
+			ourLog.info("Starting {} expansion around CodeSystem: {}", (theAdd ? "inclusion" : "exclusion"), system);
 
 			TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
 			if (cs != null) {
 
 				TermCodeSystemVersion csv = cs.getCurrentVersion();
 				FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
 
@@ -532,7 +731,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 				 */
 				if (myFulltextSearchSvc == null) {
 					expandWithoutHibernateSearch(theValueSetCodeAccumulator, theAddedCodes, theInclude, system, theAdd, theCodeCounter);
-					return;
+					return false;
 				}
 
 				/*
@@ -592,10 +791,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 					String value = nextFilter.getValue();
 					if (value.endsWith("$")) {
 						value = value.substring(0, value.length() - 1);
-					} else if (value.endsWith(".*") == false) {
+					} else if (!value.endsWith(".*")) {
 						value = value + ".*";
 					}
-					if (value.startsWith("^") == false && value.startsWith(".*") == false) {
+					if (!value.startsWith("^") && !value.startsWith(".*")) {
 						value = ".*" + value;
 					} else if (value.startsWith("^")) {
 						value = value.substring(1);
@@ -646,25 +845,43 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 				 */
 
 				FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
-				int maxResult = 50000;
-				jpaQuery.setMaxResults(maxResult);
+				/*
+				 * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. This might
+				 * be due to the dark arts that is memory management. Will monitor but not do anything about this right now.
+				 */
+				BooleanQuery.setMaxClauseCount(10000);
 
 				StopWatch sw = new StopWatch();
 				AtomicInteger count = new AtomicInteger(0);
 
-				for (Object next : jpaQuery.getResultList()) {
+				int maxResultsPerBatch = 10000;
+				jpaQuery.setMaxResults(maxResultsPerBatch);
+				jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch);
+
+				ourLog.info("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch);
+
+				StopWatch swForBatch = new StopWatch();
+				AtomicInteger countForBatch = new AtomicInteger(0);
+
+				List resultList = jpaQuery.getResultList();
+				int resultsInBatch = resultList.size();
+				int firstResult = jpaQuery.getFirstResult();
+				for (Object next : resultList) {
 					count.incrementAndGet();
+					countForBatch.incrementAndGet();
 					TermConcept concept = (TermConcept) next;
 					addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter);
 				}
 
-				if (maxResult == count.get()) {
-					throw new InternalErrorException("Expansion fragment produced too many (>= " + maxResult + ") results");
+				ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis());
+
+				if (resultsInBatch < maxResultsPerBatch) {
+					ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis());
+					return false;
+				} else {
+					return true;
 				}
 
-				ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis());
-
 			} else {
 				// No codesystem matching the URL found in the database
 
@@ -673,7 +890,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 				throw new InvalidRequestException("Unknown code system: " + system);
 			}
 
-			if (theInclude.getConcept().isEmpty() == false) {
+			if (!theInclude.getConcept().isEmpty()) {
 				for (ValueSet.ConceptReferenceComponent next : theInclude.getConcept()) {
 					String nextCode = next.getCode();
 					if (isNoneBlank(system, nextCode) && !theAddedCodes.contains(system + "|" + nextCode)) {
@@ -693,10 +910,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 				addConceptsToList(theValueSetCodeAccumulator, theAddedCodes, system, concept, theAdd);
 			}
 
+			return false;
 			}
 		} else if (hasValueSet) {
 
 			for (CanonicalType nextValueSet : theInclude.getValueSet()) {
-				ourLog.info("Starting {} expansion around ValueSet URI: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString());
+				ourLog.info("Starting {} expansion around ValueSet: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString());
 
 				List<VersionIndependentConcept> expanded = expandValueSet(nextValueSet.getValueAsString());
 				for (VersionIndependentConcept nextConcept : expanded) {
@@ -715,9 +934,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 			}
 
 			}
 
+			return false;
+
 		} else {
 			throw new InvalidRequestException("ValueSet contains " + (theAdd ? "include" : "exclude") + " criteria with no system defined");
 		}
 
 	}
 
 	private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd, AtomicInteger theCodeCounter) {
@@ -736,6 +960,48 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		}
 	}
 
+	protected ValidateCodeResult validateCodeIsInPreExpandedValueSet(
+		ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Coding theCoding, CodeableConcept theCodeableConcept) {
+
+		ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet.hasId(), "ValueSet.id is required");
+
+		Long valueSetId = theValueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong();
+
+		List<TermValueSetConcept> concepts = new ArrayList<>();
+		if (isNotBlank(theCode)) {
+			if (isNotBlank(theSystem)) {
+				concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, theSystem, theCode);
+			} else {
+				concepts = myValueSetConceptDao.findOneByValueSetIdAndCode(valueSetId, theCode);
+			}
+		} else if (theCoding != null) {
+			if (theCoding.hasSystem() && theCoding.hasCode()) {
+				concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, theCoding.getSystem(), theCoding.getCode());
+			}
+		} else if (theCodeableConcept != null) {
+			for (Coding coding : theCodeableConcept.getCoding()) {
+				if (coding.hasSystem() && coding.hasCode()) {
+					concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, coding.getSystem(), coding.getCode());
+					if (!concepts.isEmpty()) {
+						break;
+					}
+				}
+			}
+		}
+
+		for (TermValueSetConcept concept : concepts) {
+			if (isNotBlank(theDisplay) && theDisplay.equals(concept.getDisplay())) {
+				return new ValidateCodeResult(true, "Validation succeeded", concept.getDisplay());
+			}
+		}
+
+		if (!concepts.isEmpty()) {
+			return new ValidateCodeResult(true, "Validation succeeded", concepts.get(0).getDisplay());
+		}
+
+		return null;
+	}
+
 	private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
 		for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
 			TermConcept nextChild = nextChildLink.getChild();
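
validatePreExpansionStatusOfValueSetOrThrowException() above resolves its error text through the HapiLocalizer, keyed by class and by the enum's code, so the message bundle needs matching entries. An illustrative shape for those entries, in the same format as the project's messages properties file (the actual wording is defined in that file, not here):

	ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand=ValueSet is not ready for $expand; current pre-expansion status: {0} | {1}
	ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.
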
@@ -781,7 +1047,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		 */
 		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
 		txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY);
-		return txTemplate.execute(t->{
+		return txTemplate.execute(t -> {
 			TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem);
 			return myConceptDao.findByCodeSystemAndCode(csv, theCode);
 		});
@@ -798,7 +1064,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		StopWatch stopwatch = new StopWatch();
 
 		Optional<TermConcept> concept = fetchLoadedCode(theCodeSystemResourcePid, theCode);
-		if (concept.isPresent() == false) {
+		if (!concept.isPresent()) {
 			return Collections.emptySet();
 		}
 
@@ -829,7 +1095,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		Stopwatch stopwatch = Stopwatch.createStarted();
 
 		Optional<TermConcept> concept = fetchLoadedCode(theCodeSystemResourcePid, theCode);
-		if (concept.isPresent() == false) {
+		if (!concept.isPresent()) {
 			return Collections.emptySet();
 		}
 
@@ -1014,8 +1280,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 			@Override
 			protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
 				int maxResult = 1000;
-				Page<TermConcept> concepts = myConceptDao.findResourcesRequiringReindexing(new PageRequest(0, maxResult));
-				if (concepts.hasContent() == false) {
+				Page<TermConcept> concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult));
+				if (!concepts.hasContent()) {
 					if (myChildToParentPidCache != null) {
 						ourLog.info("Clearing parent concept cache");
 						myNextReindexPass = System.currentTimeMillis() + DateUtils.MILLIS_PER_MINUTE;
@@ -1076,7 +1342,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		} else {
 			return saveConcept(theConcept);
 		}
 
 	}
 
 	/**
@@ -1122,28 +1388,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	@Transactional(propagation = Propagation.NEVER)
 	@Override
 	public synchronized void saveDeferred() {
-		if (!myProcessDeferred) {
+		if (isProcessDeferredPaused()) {
 			return;
-		} else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) {
+		} else if (isNoDeferredConceptsAndNoConceptLinksToSaveLater()) {
 			processReindexing();
 		}
 
 		TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
 		tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-		if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
+		if (isDeferredConceptsOrConceptLinksToSaveLater()) {
 			tt.execute(t -> {
 				processDeferredConcepts();
 				return null;
 			});
 		}
 
-		if (myDeferredValueSets.size() > 0) {
+		if (isDeferredValueSets()) {
 			tt.execute(t -> {
 				processDeferredValueSets();
 				return null;
 			});
 		}
-		if (myDeferredConceptMaps.size() > 0) {
+		if (isDeferredConceptMaps()) {
 			tt.execute(t -> {
 				processDeferredConceptMaps();
 				return null;
@@ -1152,6 +1418,42 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 
 	}
 
+	private boolean isProcessDeferredPaused() {
+		return !myProcessDeferred;
+	}
+
+	private boolean isNoDeferredConceptsAndNoConceptLinksToSaveLater() {
+		return isNoDeferredConcepts() && isNoConceptLinksToSaveLater();
+	}
+
+	private boolean isDeferredConceptsOrConceptLinksToSaveLater() {
+		return isDeferredConcepts() || isConceptLinksToSaveLater();
+	}
+
+	private boolean isDeferredConcepts() {
+		return !myDeferredConcepts.isEmpty();
+	}
+
+	private boolean isNoDeferredConcepts() {
+		return myDeferredConcepts.isEmpty();
+	}
+
+	private boolean isConceptLinksToSaveLater() {
+		return !myConceptLinksToSaveLater.isEmpty();
+	}
+
+	private boolean isNoConceptLinksToSaveLater() {
+		return myConceptLinksToSaveLater.isEmpty();
+	}
+
+	private boolean isDeferredValueSets() {
+		return !myDeferredValueSets.isEmpty();
+	}
+
+	private boolean isDeferredConceptMaps() {
+		return !myDeferredConceptMaps.isEmpty();
+	}
+
 	@Override
 	public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
 		myApplicationContext = theApplicationContext;
@ -1498,31 +1800,86 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
|
|||
@Scheduled(fixedDelay = 600000) // 10 minutes.
|
||||
@Override
|
||||
public synchronized void preExpandValueSetToTerminologyTables() {
|
||||
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
   if (isNotSafeToPreExpandValueSets()) {
      ourLog.info("Skipping scheduled pre-expansion of ValueSets while deferred entities are being loaded.");
      return;
   }
   TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);

   while (true) {
      TermValueSet valueSetToExpand = txTemplate.execute(t -> {
         Optional<TermValueSet> optionalTermValueSet = getNextTermValueSetNotExpanded();
         if (optionalTermValueSet.isPresent()) {
            TermValueSet termValueSet = optionalTermValueSet.get();
            termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS);
            myValueSetDao.saveAndFlush(termValueSet);

            ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource());

            expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao));

            termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED);
            myValueSetDao.saveAndFlush(termValueSet);
         if (!optionalTermValueSet.isPresent()) {
            return null;
         }

         TermValueSet termValueSet = optionalTermValueSet.get();
         termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS);
         return myValueSetDao.saveAndFlush(termValueSet);
      });
      if (valueSetToExpand == null) {
         return;
      }
      });

      // We have a ValueSet to pre-expand.
      try {
         ValueSet valueSet = txTemplate.execute(t -> {
            TermValueSet refreshedValueSetToExpand = myValueSetDao.findById(valueSetToExpand.getId()).get();
            return getValueSetFromResourceTable(refreshedValueSetToExpand.getResource());
         });
         expandValueSet(valueSet, new ValueSetConceptAccumulator(valueSetToExpand, myValueSetConceptDao, myValueSetConceptDesignationDao));

         // We are done with this ValueSet.
         txTemplate.execute(t -> {
            valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED);
            myValueSetDao.saveAndFlush(valueSetToExpand);
            return null;
         });

      } catch (Exception e) {
         ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e);
         txTemplate.execute(t -> {
            valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND);
            myValueSetDao.saveAndFlush(valueSetToExpand);
            return null;
         });
      }
   }
}

private boolean isNotSafeToPreExpandValueSets() {
   return !isSafeToPreExpandValueSets();
}

private boolean isSafeToPreExpandValueSets() {
   if (isProcessDeferredPaused()) {
      return false;
   }

   if (isDeferredConcepts()) {
      return false;
   }

   if (isConceptLinksToSaveLater()) {
      return false;
   }

   if (isDeferredValueSets()) {
      return false;
   }

   if (isDeferredConceptMaps()) {
      return false;
   }

   return true;
}

protected abstract ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable);

private Optional<TermValueSet> getNextTermValueSetNotExpanded() {
   Optional<TermValueSet> retVal = Optional.empty();
   Page<TermValueSet> page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED);
   Slice<TermValueSet> page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED);

   if (!page.getContent().isEmpty()) {
      retVal = Optional.of(page.getContent().get(0));
   }

   return retVal;
}
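The Page-to-Slice change in getNextTermValueSetNotExpanded() avoids the COUNT query that a Page result issues on every poll; only one row plus a has-next flag is needed here. A minimal sketch of the Spring Data repository method this assumes — the @Query text and parameter names are illustrative, not taken from this commit:

import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

public interface ITermValueSetDao extends JpaRepository<TermValueSet, Long> {
   // Slice fetches the requested page and a has-next flag; no COUNT(*) is executed.
   @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status")
   Slice<TermValueSet> findByExpansionStatus(Pageable thePage, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus);
}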
@@ -20,13 +20,13 @@ package ca.uhn.fhir.jpa.term;
 * #L%
 */

import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.model.*;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
@@ -92,6 +92,11 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
      throw new UnsupportedOperationException();
   }

   @Override
   public IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount) {
      throw new UnsupportedOperationException();
   }

   @Override
   public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
      throw new UnsupportedOperationException();

@@ -144,4 +149,8 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
      return retVal;
   }

   @Override
   public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
      throw new UnsupportedOperationException();
   }
}
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;

@@ -15,6 +16,7 @@ import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;

@@ -29,7 +31,8 @@ import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/*
 * #%L
@@ -177,6 +180,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
      }
   }

   @Override
   public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) {
      ValueSet valueSetToExpand = (ValueSet) theInput;

      try {
         org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
         valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand);
         org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount);
         return VersionConvertor_30_40.convertValueSet(expandedR4);
      } catch (FHIRException e) {
         throw new InternalErrorException(e);
      }
   }

   @Override
   public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
      ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand;
@@ -345,5 +362,26 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
      return null;
   }

   @Override
   public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
      ValueSet valueSet = (ValueSet) theValueSet;
      Coding coding = (Coding) theCoding;
      CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept;

      try {
         org.hl7.fhir.r4.model.ValueSet valueSetR4;
         valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet);

         org.hl7.fhir.r4.model.Coding codingR4 = new org.hl7.fhir.r4.model.Coding(coding.getSystem(), coding.getCode(), coding.getDisplay());

         org.hl7.fhir.r4.model.CodeableConcept codeableConceptR4 = new org.hl7.fhir.r4.model.CodeableConcept();
         for (Coding nestedCoding : codeableConcept.getCoding()) {
            codeableConceptR4.addCoding(new org.hl7.fhir.r4.model.Coding(nestedCoding.getSystem(), nestedCoding.getCode(), nestedCoding.getDisplay()));
         }

         return super.validateCodeIsInPreExpandedValueSet(valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConceptR4);
      } catch (FHIRException e) {
         throw new InternalErrorException(e);
      }
   }
}
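A minimal caller sketch for the new version-independent validation entry point. The injected service name, the LOINC code shown, and the assumption that ValidateCodeResult exposes isResult() and getDisplay() are all illustrative, not taken from this change:

ValidateCodeResult outcome = myTermSvc.validateCodeIsInPreExpandedValueSet(
   theValueSet, "http://loinc.org", "8867-4", null, null, null);
if (outcome != null && outcome.isResult()) {
   // The code was found among the pre-expanded concepts for this ValueSet
   ourLog.info("Code is in the pre-expanded ValueSet: {}", outcome.getDisplay());
}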
@@ -2,18 +2,17 @@ package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.r4.terminologies.ValueSetExpander;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
@@ -137,6 +136,12 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
      return super.expandValueSet(valueSetToExpand);
   }

   @Override
   public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) {
      ValueSet valueSetToExpand = (ValueSet) theInput;
      return super.expandValueSet(valueSetToExpand, theOffset, theCount);
   }

   @Override
   public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
      ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand;
@@ -272,4 +277,11 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
      return super.lookupCode(theContext, theSystem, theCode);
   }

   @Override
   public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
      ValueSet valueSet = (ValueSet) theValueSet;
      Coding coding = (Coding) theCoding;
      CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept;
      return super.validateCodeIsInPreExpandedValueSet(valueSet, theSystem, theCode, theDisplay, coding, codeableConcept);
   }
}
@@ -2,14 +2,16 @@ package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r5.model.*;
import org.hl7.fhir.r5.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.r5.terminologies.ValueSetExpander;
@@ -143,6 +145,13 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements
      return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4);
   }

   @Override
   public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) {
      org.hl7.fhir.r4.model.ValueSet valueSetToExpand = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theInput);
      org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount);
      return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4);
   }

   @Override
   public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
      org.hl7.fhir.r4.model.ValueSet valueSetToExpand = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSetToExpand);
@@ -280,4 +289,19 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements
      return super.lookupCode(theContext, theSystem, theCode);
   }

   @Override
   public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
      org.hl7.fhir.r4.model.ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSet);

      Coding coding = (Coding) theCoding;
      org.hl7.fhir.r4.model.Coding codingR4 = new org.hl7.fhir.r4.model.Coding(coding.getSystem(), coding.getCode(), coding.getDisplay());

      CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept;
      org.hl7.fhir.r4.model.CodeableConcept codeableConceptR4 = new org.hl7.fhir.r4.model.CodeableConcept();
      for (Coding nestedCoding : codeableConcept.getCoding()) {
         codeableConceptR4.addCoding(new org.hl7.fhir.r4.model.Coding(nestedCoding.getSystem(), nestedCoding.getCode(), nestedCoding.getDisplay()));
      }

      return super.validateCodeIsInPreExpandedValueSet(valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConceptR4);
   }
}
@@ -1,16 +1,12 @@
package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.*;

import javax.annotation.Nullable;
import java.util.List;
@@ -44,6 +40,8 @@ public interface IHapiTerminologySvc {

   ValueSet expandValueSet(ValueSet theValueSetToExpand);

   ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount);

   void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator);

   /**

@@ -51,6 +49,11 @@ public interface IHapiTerminologySvc {
    */
   IBaseResource expandValueSet(IBaseResource theValueSetToExpand);

   /**
    * Version independent
    */
   IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount);

   void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator);

   List<VersionIndependentConcept> expandValueSet(String theValueSet);

@@ -109,4 +112,9 @@ public interface IHapiTerminologySvc {
   AtomicInteger applyDeltaCodesystemsRemove(String theSystem, CodeSystem theDelta);

   void preExpandValueSetToTerminologyTables();

   /**
    * Version independent
    */
   ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept);
}
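Taken together, the new overloads let callers page through a pre-expanded expansion instead of materializing every code at once. A minimal caller sketch, assuming an injected IHapiTerminologySvc named myTermSvc and a ValueSet that has already been pre-expanded (both names are illustrative):

// First page of 100 codes, then the next 100; offset and count are applied
// against the pre-expanded concept rows rather than an in-memory expansion.
IBaseResource firstPage = myTermSvc.expandValueSet(myValueSet, 0, 100);
IBaseResource secondPage = myTermSvc.expandValueSet(myValueSet, 100, 100);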
@@ -32,7 +32,9 @@ import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.*;

@@ -41,6 +43,7 @@ import java.util.Map.Entry;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/*
@@ -67,26 +70,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
   public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
   public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
   public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";

   public static final String IMGTHLA_HLA_NOM_TXT = "hla_nom.txt";
   public static final String IMGTHLA_HLA_XML = "hla.xml";
   public static final String LOINC_ANSWERLIST_FILE = "AnswerList.csv";
   public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink.csv";
   public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv";
   public static final String LOINC_UPLOAD_PROPERTIES_FILE = "loincupload.properties";
   public static final String LOINC_FILE = "LoincTable/Loinc.csv";
   public static final String LOINC_HIERARCHY_FILE = "MultiAxialHierarchy.csv";
   public static final String LOINC_PART_FILE = "Part.csv";
   public static final String LOINC_PART_LINK_FILE = "LoincPartLink.csv";
   public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping.csv";
   public static final String LOINC_RSNA_PLAYBOOK_FILE = "LoincRsnaRadiologyPlaybook.csv";
   public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUs.csv";
   public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSi.csv";
   public static final String LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE = "LoincUniversalLabOrdersValueSet.csv";
   public static final String LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV = "LoincIeeeMedicalDeviceCodeMappingTable.csv";
   public static final String LOINC_IMAGING_DOCUMENT_CODES_FILE = "ImagingDocumentCodes.csv";
   public static final String LOINC_GROUP_FILE = "Group.csv";
   public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
   public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";

   public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv";
   public static final String CUSTOM_HIERARCHY_FILE = "hierarchy.csv";
   public static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json";
@@ -219,35 +206,61 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
   @Override
   public UploadStatistics loadLoinc(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
      try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
         List<String> loincUploadPropertiesFragment = Arrays.asList(
            LOINC_UPLOAD_PROPERTIES_FILE.getCode()
         );
         descriptors.verifyMandatoryFilesExist(loincUploadPropertiesFragment);

         Properties uploadProperties = getProperties(descriptors, LOINC_UPLOAD_PROPERTIES_FILE.getCode());

         List<String> mandatoryFilenameFragments = Arrays.asList(
            LOINC_FILE,
            LOINC_HIERARCHY_FILE,
            LOINC_UPLOAD_PROPERTIES_FILE,
            LOINC_ANSWERLIST_FILE,
            LOINC_ANSWERLIST_LINK_FILE,
            LOINC_PART_FILE,
            LOINC_PART_LINK_FILE,
            LOINC_PART_RELATED_CODE_MAPPING_FILE,
            LOINC_DOCUMENT_ONTOLOGY_FILE,
            LOINC_RSNA_PLAYBOOK_FILE,
            LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE,
            LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE,
            LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE,
            LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV,
            LOINC_IMAGING_DOCUMENT_CODES_FILE
            uploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode())
         );
         descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);

         List<String> optionalFilenameFragments = Arrays.asList(
            uploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()),
            uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode())
         );
         descriptors.verifyOptionalFilesExist(optionalFilenameFragments);

         ourLog.info("Beginning LOINC processing");

         return processLoincFiles(descriptors, theRequestDetails);
         return processLoincFiles(descriptors, theRequestDetails, uploadProperties);
      }
   }
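By way of illustration, a loincupload.properties along these lines is what loadLoinc() now consumes. The keys are the codes from LoincUploadPropertiesEnum (introduced below); every entry is optional, falling back to the *_DEFAULT filenames, and the values shown here are placeholders rather than shipped content:

# Hypothetical loincupload.properties
loinc.file=LoincTable/Loinc.csv
loinc.hierarchy.file=MultiAxialHierarchy.csv
loinc.answerlist.file=AnswerList.csv
loinc.answerlist.version=1.0
loinc.conceptmap.version=1.0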
   @NotNull
   private Properties getProperties(LoadedFileDescriptors theDescriptors, String thePropertiesFile) {
      Properties retVal = new Properties();
      for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
         if (next.getFilename().endsWith(thePropertiesFile)) {
            try {
               try (InputStream inputStream = next.getInputStream()) {
                  retVal.load(inputStream);
               }
            } catch (IOException e) {
               throw new InternalErrorException("Failed to read " + thePropertiesFile, e);
            }
         }
      }
      return retVal;
   }

   @Override
   public UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
      try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
@@ -444,7 +457,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
      // return new UploadStatistics(conceptCount, target);
   }

   UploadStatistics processLoincFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
   UploadStatistics processLoincFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails, Properties theUploadProperties) {
      final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
      final Map<String, TermConcept> code2concept = new HashMap<>();
      final List<ValueSet> valueSets = new ArrayList<>();
|
|||
|
||||
IRecordHandler handler;
|
||||
|
||||
Properties uploadProperties = new Properties();
|
||||
for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
|
||||
if (next.getFilename().endsWith("loincupload.properties")) {
|
||||
try {
|
||||
try (InputStream inputStream = next.getInputStream()) {
|
||||
uploadProperties.load(inputStream);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException("Failed to read loincupload.properties", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Part file
|
||||
// Part
|
||||
handler = new LoincPartHandler(codeSystemVersion, code2concept);
|
||||
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();
|
||||
|
||||
// Loinc Codes
|
||||
// LOINC codes
|
||||
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
|
||||
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Loinc Hierarchy
|
||||
// LOINC hierarchy
|
||||
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
|
||||
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Answer lists (ValueSets of potential answers/values for loinc "questions")
|
||||
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Answer lists (ValueSets of potential answers/values for LOINC "questions")
|
||||
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Answer list links (connects loinc observation codes to answerlist codes)
|
||||
// Answer list links (connects LOINC observation codes to answer list codes)
|
||||
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
|
||||
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// RSNA Playbook file
|
||||
// RSNA playbook
|
||||
// Note that this should come before the "Part Related Code Mapping"
|
||||
// file because there are some duplicate mappings between these
|
||||
// two files, and the RSNA Playbook file has more metadata
|
||||
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Part link file
|
||||
// Part link
|
||||
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
|
||||
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Part related code mapping
|
||||
handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Document Ontology File
|
||||
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Document ontology
|
||||
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Top 2000 Codes - US
|
||||
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Top 2000 codes - US
|
||||
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Top 2000 Codes - SI
|
||||
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Top 2000 codes - SI
|
||||
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Universal Lab Order ValueSet
|
||||
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Universal lab order ValueSet
|
||||
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// IEEE Medical Device Codes
|
||||
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// IEEE medical device codes
|
||||
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Imaging Document Codes
|
||||
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Imaging document codes
|
||||
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Group File
|
||||
handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Group
|
||||
handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Group Terms File
|
||||
handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Group terms
|
||||
handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Parent Group File
|
||||
handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||
iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
// Parent group
|
||||
handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties);
|
||||
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
IOUtils.closeQuietly(theDescriptors);
|
||||
|
||||
valueSets.add(getValueSetLoincAll());
|
||||
|
||||
for (Entry<String, TermConcept> next : code2concept.entrySet()) {
|
||||
TermConcept nextConcept = next.getValue();
|
||||
if (nextConcept.getParents().isEmpty()) {
|
||||
|
@@ -573,6 +575,23 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
      return new UploadStatistics(conceptCount, target);
   }

   private ValueSet getValueSetLoincAll() {
      ValueSet retVal = new ValueSet();

      retVal.setId("loinc-all");
      retVal.setUrl("http://loinc.org/vs");
      retVal.setVersion("1.0.0");
      retVal.setName("All LOINC codes");
      retVal.setStatus(Enumerations.PublicationStatus.ACTIVE);
      retVal.setDate(new Date());
      retVal.setPublisher("Regenstrief Institute, Inc.");
      retVal.setDescription("A value set that includes all LOINC codes");
      retVal.setCopyright("This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/");
      retVal.getCompose().addInclude().setSystem(IHapiTerminologyLoaderSvc.LOINC_URI);

      return retVal;
   }

   private UploadStatistics processSnomedCtFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
      final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
      final Map<String, TermConcept> id2concept = new HashMap<>();
|
|||
private void verifyOptionalFilesExist(List<String> theExpectedFilenameFragments) {
|
||||
List<String> notFound = notFound(theExpectedFilenameFragments);
|
||||
if (!notFound.isEmpty()) {
|
||||
ourLog.warn("Could not find the following optional file: " + notFound);
|
||||
ourLog.warn("Could not find the following optional files: " + notFound);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -71,7 +71,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
      }

      // Get existing entity so it can be deleted.
      Optional<TermValueSetConcept> optionalConcept = myValueSetConceptDao.findByValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode);
      Optional<TermValueSetConcept> optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode);

      if (optionalConcept.isPresent()) {
         TermValueSetConcept concept = optionalConcept.get();

@@ -103,9 +103,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
      }
      myValueSetConceptDao.save(concept);

      if (myConceptsSaved++ % 250 == 0) {
      if (myConceptsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it!
         ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl());
         myValueSetConceptDao.flush();
      }

      return concept;

@@ -116,6 +115,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {

      TermValueSetConceptDesignation designation = new TermValueSetConceptDesignation();
      designation.setConcept(theConcept);
      designation.setValueSet(myTermValueSet);
      designation.setLanguage(theDesignation.getLanguage());
      if (isNoneBlank(theDesignation.getUseSystem(), theDesignation.getUseCode())) {
         designation.setUseSystem(theDesignation.getUseSystem());

@@ -127,9 +127,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
      designation.setValue(theDesignation.getValue());
      myValueSetConceptDesignationDao.save(designation);

      if (myDesignationsSaved++ % 250 == 0) {
         ourLog.info("Have pre-expanded {} designations in ValueSet[{}]", myDesignationsSaved, myTermValueSet.getUrl());
         myValueSetConceptDesignationDao.flush();
      if (myDesignationsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it!
         ourLog.info("Have pre-expanded {} designations for Concept[{}|{}] in ValueSet[{}]", myDesignationsSaved, theConcept.getSystem(), theConcept.getCode(), myTermValueSet.getUrl());
      }

      return designation;
@@ -22,15 +22,23 @@ package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import ca.uhn.fhir.model.api.annotation.Block;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.r4.model.ValueSet;

import java.util.Collection;

@Block()
public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent implements IValueSetConceptAccumulator {
   private final int myMaxResults = 50000;
   private int myConceptsCount;

   public ValueSetExpansionComponentWithConceptAccumulator() {
      myConceptsCount = 0;
   }

   @Override
   public void includeConcept(String theSystem, String theCode, String theDisplay) {
      incrementConceptsCount();
      ValueSet.ValueSetExpansionContainsComponent contains = this.addContains();
      contains.setSystem(theSystem);
      contains.setCode(theCode);

@@ -39,6 +47,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V

   @Override
   public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection<TermConceptDesignation> theDesignations) {
      incrementConceptsCount();
      ValueSet.ValueSetExpansionContainsComponent contains = this.addContains();
      contains.setSystem(theSystem);
      contains.setCode(theCode);

@@ -65,4 +74,10 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V
            theSystem.equals(t.getSystem()) &&
            theCode.equals(t.getCode()));
   }

   private void incrementConceptsCount() {
      if (++myConceptsCount > myMaxResults) {
         throw new InternalErrorException("Expansion produced too many (>= " + myMaxResults + ") results");
      }
   }
}
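A contrived driver showing the effect of the new guard; the names come from the class above but the loop itself is purely illustrative:

ValueSetExpansionComponentWithConceptAccumulator accumulator = new ValueSetExpansionComponentWithConceptAccumulator();
for (int i = 0; i <= 50_000; i++) {
   // The 50,001st call pushes myConceptsCount past myMaxResults (50,000) and
   // the accumulator throws InternalErrorException rather than growing without bound.
   accumulator.includeConcept("http://loinc.org", "code-" + i, "Display " + i);
}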
@@ -34,6 +34,7 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;

import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
import static org.apache.commons.lang3.StringUtils.*;

public abstract class BaseLoincHandler implements IRecordHandler {

@@ -114,7 +115,7 @@ public abstract class BaseLoincHandler implements IRecordHandler {
      conceptMap.setId(theMapping.getConceptMapId());
      conceptMap.setUrl(theMapping.getConceptMapUri());
      conceptMap.setName(theMapping.getConceptMapName());
      conceptMap.setVersion(myUploadProperties.getProperty("conceptmap.version"));
      conceptMap.setVersion(myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()));
      conceptMap.setPublisher(REGENSTRIEF_INSTITUTE_INC);
      conceptMap.addContact()
         .setName(REGENSTRIEF_INSTITUTE_INC)
@@ -31,7 +31,9 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;

import static org.apache.commons.lang3.StringUtils.*;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;

public class LoincAnswerListHandler extends BaseLoincHandler {

@@ -72,7 +74,7 @@ public class LoincAnswerListHandler extends BaseLoincHandler {
      }

      // Answer list ValueSet
      ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "answerlist.version");
      ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, LOINC_ANSWERLIST_VERSION.getCode());
      if (vs.getIdentifier().isEmpty()) {
         vs.addIdentifier()
            .setSystem("urn:ietf:rfc:3986")
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;

@@ -52,6 +53,18 @@ public class LoincHierarchyHandler implements IRecordHandler {
      TermConcept child = getOrCreate(childCode, childCodeText);

      parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA);

      parent.addPropertyCoding(
         "child",
         IHapiTerminologyLoaderSvc.LOINC_URI,
         child.getCode(),
         child.getDisplay());

      child.addPropertyCoding(
         "parent",
         IHapiTerminologyLoaderSvc.LOINC_URI,
         parent.getCode(),
         parent.getDisplay());
   }
}
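To make the new bidirectional codings concrete, a sketch for one hierarchy row linking parent LP31755-9 ("Microbiology") to child LP14082-9 ("Bacteria"); the snippet is illustrative and assumes TermConcept's constructor and fluent setDisplay():

TermConcept parent = new TermConcept(codeSystemVersion, "LP31755-9").setDisplay("Microbiology");
TermConcept child = new TermConcept(codeSystemVersion, "LP14082-9").setDisplay("Bacteria");
parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
// After the handler runs, each side also carries a Coding property pointing at the other:
parent.addPropertyCoding("child", IHapiTerminologyLoaderSvc.LOINC_URI, child.getCode(), child.getDisplay());
child.addPropertyCoding("parent", IHapiTerminologyLoaderSvc.LOINC_URI, parent.getCode(), parent.getDisplay());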
@@ -0,0 +1,142 @@
package ca.uhn.fhir.jpa.term.loinc;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * This enum is used to facilitate configurable filenames when uploading LOINC.
 */
public enum LoincUploadPropertiesEnum {
   /**
    * Sorting agnostic.
    */
   LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"),

   /*
    * MANDATORY
    */
   // Answer lists (ValueSets of potential answers/values for LOINC "questions")
   LOINC_ANSWERLIST_FILE("loinc.answerlist.file"),
   LOINC_ANSWERLIST_FILE_DEFAULT("AnswerList.csv"),
   // Answer list links (connects LOINC observation codes to answer list codes)
   LOINC_ANSWERLIST_LINK_FILE("loinc.answerlist.link.file"),
   LOINC_ANSWERLIST_LINK_FILE_DEFAULT("LoincAnswerListLink.csv"),

   // Document ontology
   LOINC_DOCUMENT_ONTOLOGY_FILE("loinc.document.ontology.file"),
   LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT("DocumentOntology.csv"),

   // LOINC codes
   LOINC_FILE("loinc.file"),
   LOINC_FILE_DEFAULT("LoincTable/Loinc.csv"),

   // LOINC hierarchy
   LOINC_HIERARCHY_FILE("loinc.hierarchy.file"),
   LOINC_HIERARCHY_FILE_DEFAULT("MultiAxialHierarchy.csv"),

   // IEEE medical device codes
   LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE("loinc.ieee.medical.device.code.mapping.table.file"),
   LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT("LoincIeeeMedicalDeviceCodeMappingTable.csv"),

   // Imaging document codes
   LOINC_IMAGING_DOCUMENT_CODES_FILE("loinc.imaging.document.codes.file"),
   LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT("ImagingDocumentCodes.csv"),

   // Part
   LOINC_PART_FILE("loinc.part.file"),
   LOINC_PART_FILE_DEFAULT("Part.csv"),
   // Part link
   LOINC_PART_LINK_FILE("loinc.part.link.file"),
   LOINC_PART_LINK_FILE_DEFAULT("LoincPartLink.csv"),
   // Part related code mapping
   LOINC_PART_RELATED_CODE_MAPPING_FILE("loinc.part.related.code.mapping.file"),
   LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT("PartRelatedCodeMapping.csv"),

   // RSNA playbook
   LOINC_RSNA_PLAYBOOK_FILE("loinc.rsna.playbook.file"),
   LOINC_RSNA_PLAYBOOK_FILE_DEFAULT("LoincRsnaRadiologyPlaybook.csv"),

   // Top 2000 codes - SI
   LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE("loinc.top2000.common.lab.results.si.file"),
   LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT("Top2000CommonLabResultsSi.csv"),
   // Top 2000 codes - US
   LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE("loinc.top2000.common.lab.results.us.file"),
   LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT("Top2000CommonLabResultsUs.csv"),

   // Universal lab order ValueSet
   LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE("loinc.universal.lab.order.valueset.file"),
   LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT("LoincUniversalLabOrdersValueSet.csv"),

   /*
    * OPTIONAL
    */
   // This is the version identifier for the answer list file
   LOINC_ANSWERLIST_VERSION("loinc.answerlist.version"),

   // This is the version identifier for uploaded ConceptMap resources
   LOINC_CONCEPTMAP_VERSION("loinc.conceptmap.version"),

   // Group
   LOINC_GROUP_FILE("loinc.group.file"),
   LOINC_GROUP_FILE_DEFAULT("Group.csv"),
   // Group terms
   LOINC_GROUP_TERMS_FILE("loinc.group.terms.file"),
   LOINC_GROUP_TERMS_FILE_DEFAULT("GroupLoincTerms.csv"),

   // Parent group
   LOINC_PARENT_GROUP_FILE("loinc.parent.group.file"),
   LOINC_PARENT_GROUP_FILE_DEFAULT("ParentGroup.csv");

   private static Map<String, LoincUploadPropertiesEnum> ourValues;
   private String myCode;

   LoincUploadPropertiesEnum(String theCode) {
      myCode = theCode;
   }

   public String getCode() {
      return myCode;
   }

   public static LoincUploadPropertiesEnum fromCode(String theCode) {
      if (ourValues == null) {
         HashMap<String, LoincUploadPropertiesEnum> values = new HashMap<String, LoincUploadPropertiesEnum>();
         for (LoincUploadPropertiesEnum next : values()) {
            values.put(next.getCode(), next);
         }
         ourValues = Collections.unmodifiableMap(values);
      }
      return ourValues.get(theCode);
   }

   /**
    * Convert from Enum ordinal to Enum type.
    *
    * Usage:
    *
    * <code>LoincUploadPropertiesEnum loincUploadPropertiesEnum = LoincUploadPropertiesEnum.values[ordinal];</code>
    */
   public static final LoincUploadPropertiesEnum values[] = values();
}
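A short illustration of how the loader resolves filenames through this enum; the Properties instance would normally be loaded from loincupload.properties via getProperties(...) above:

Properties uploadProperties = new Properties();
// Falls back to "LoincTable/Loinc.csv" when loinc.file is not set:
String loincCsv = uploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode());
// Reverse lookup from a property key back to the enum constant:
LoincUploadPropertiesEnum key = LoincUploadPropertiesEnum.fromCode("loinc.file"); // returns LOINC_FILE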
@@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.i18n.HapiLocalizer;
import org.junit.Test;

import static org.junit.Assert.fail;

public class TermValueSetPreExpansionStatusEnumTest {
   @Test
   public void testHaveDescriptions() {
      HapiLocalizer localizer = new HapiLocalizer();

      for (TermValueSetPreExpansionStatusEnum next : TermValueSetPreExpansionStatusEnum.values()) {
         String key = "ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum." + next.getCode();
         String msg = localizer.getMessage(key);
         if (msg.equals(HapiLocalizer.UNKNOWN_I18N_KEY_MESSAGE)) {
            fail("No value for key: " + key);
         }
      }
   }
}
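This test passes only if the HAPI messages bundle carries one description per status code. The entries would look roughly like the following; the codes and wording are illustrative, not copied from the commit:

ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.
ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress.
ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete.
ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.failedToExpand=The ValueSet has been picked up by a scheduled task and pre-expansion has failed.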
@@ -397,7 +397,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3

   @Test
   public void testExpandInvalidParams() throws IOException {
      //@formatter:off
      try {
         ourClient
            .operation()

@@ -407,11 +406,9 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request", e.getMessage());
         assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request.", e.getMessage());
      }
      //@formatter:on

      //@formatter:off
      try {
         ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml");
         ourClient

@@ -425,9 +422,7 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage());
      }
      //@formatter:on

      //@formatter:off
      try {
         ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml");
         ourClient

@@ -441,8 +436,30 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage());
      }
      //@formatter:on

      try {
         ourClient
            .operation()
            .onInstance(myExtensionalVsId)
            .named("expand")
            .withParameter(Parameters.class, "offset", new IntegerType(-1))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. offset: -1", e.getMessage());
      }

      try {
         ourClient
            .operation()
            .onInstance(myExtensionalVsId)
            .named("expand")
            .withParameter(Parameters.class, "count", new IntegerType(-1))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. count: -1", e.getMessage());
      }
   }
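For contrast with the failure cases above, a valid paged invocation through the generic client would look like the following sketch; chaining a second parameter via andParameter is assumed available on the fluent operation API:

Parameters respParam = ourClient
   .operation()
   .onInstance(myExtensionalVsId)
   .named("expand")
   .withParameter(Parameters.class, "offset", new IntegerType(0))
   .andParameter("count", new IntegerType(10))
   .execute();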
@Test
@@ -4,11 +4,19 @@ import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

import ca.uhn.fhir.rest.api.Constants;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;

@@ -180,6 +188,50 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
      assertThat(actualIds, contains(ids.subList(5, 10).toArray(new String[0])));
   }

   @Test
   public void testPatchUsingJsonPatch_Transaction() throws Exception {
      String methodName = "testPatchUsingJsonPatch_Transaction";
      IIdType pid1;
      {
         Patient patient = new Patient();
         patient.setActive(true);
         patient.addIdentifier().setSystem("urn:system").setValue("0");
         patient.addName().setFamily(methodName).addGiven("Joe");
         pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
      }

      String patchString = "[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]";
      Binary patch = new Binary();
      patch.setContentType(ca.uhn.fhir.rest.api.Constants.CT_JSON_PATCH);
      patch.setContent(patchString.getBytes(Charsets.UTF_8));

      // Note that we don't set the type
      Bundle input = new Bundle();
      input.setType(Bundle.BundleType.TRANSACTION);
      input.addEntry()
         .setFullUrl(pid1.getValue())
         .setResource(patch)
         .getRequest().setUrl(pid1.getValue());

      HttpPost post = new HttpPost(ourServerBase);
      String encodedRequest = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input);
      ourLog.info("Request:\n{}", encodedRequest);
      post.setEntity(new StringEntity(encodedRequest, ContentType.parse(ca.uhn.fhir.rest.api.Constants.CT_FHIR_JSON_NEW + Constants.CHARSET_UTF8_CTSUFFIX)));
      try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
         String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
         ourLog.info(responseString);
         assertEquals(200, response.getStatusLine().getStatusCode());
         assertThat(responseString, containsString("\"resourceType\":\"Bundle\""));
      }

      Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
      assertEquals("2", newPt.getIdElement().getVersionIdPart());
      assertEquals(false, newPt.getActive());
   }

   @Test
   public void testTransactionWithGetHardLimitLargeSynchronous() {
      List<String> ids = create20Patients();
@@ -1,8 +1,6 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil;

@@ -20,6 +18,7 @@ import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.*;
@@ -37,24 +36,24 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      ZipOutputStream zos = new ZipOutputStream(bos);

      addFile(zos, "loincupload.properties");
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
      addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
      addFile(zos, LOINC_UPLOAD_PROPERTIES_FILE.getCode());
      addFile(zos, LOINC_PART_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_HIERARCHY_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_ANSWERLIST_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_GROUP_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_GROUP_TERMS_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_PARENT_GROUP_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_PART_LINK_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode());
      addFile(zos, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode());

      zos.close();
@ -264,7 +264,6 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test {

@Test
public void testExpandInvalidParams() throws IOException {
	//@formatter:off
	try {
		ourClient
			.operation()
@ -274,11 +273,9 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test {
			.execute();
		fail();
	} catch (InvalidRequestException e) {
		assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request", e.getMessage());
		assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.", e.getMessage());
	}
	//@formatter:on

	//@formatter:off
	try {
		ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case-r4.xml");
		ourClient
@ -292,9 +289,7 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test {
	} catch (InvalidRequestException e) {
		assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.", e.getMessage());
	}
	//@formatter:on

	//@formatter:off
	try {
		ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case.xml");
		ourClient
@ -308,8 +303,30 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test {
	} catch (InvalidRequestException e) {
		assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.", e.getMessage());
	}
	//@formatter:on

	try {
		ourClient
			.operation()
			.onInstance(myExtensionalVsId)
			.named("expand")
			.withParameter(Parameters.class, "offset", new IntegerType(-1))
			.execute();
		fail();
	} catch (InvalidRequestException e) {
		assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. offset: -1", e.getMessage());
	}

	try {
		ourClient
			.operation()
			.onInstance(myExtensionalVsId)
			.named("expand")
			.withParameter(Parameters.class, "count", new IntegerType(-1))
			.execute();
		fail();
	} catch (InvalidRequestException e) {
		assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. count: -1", e.getMessage());
	}
}

@Test
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import ca.uhn.fhir.jpa.term.loinc.*;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
@ -19,8 +20,11 @@ import org.mockito.Captor;
import org.mockito.Mock;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
@ -252,6 +256,22 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {
	assertEquals("42176-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
	assertEquals("1,3 beta glucan [Mass/volume] in Serum", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());

	// All LOINC codes
	assertTrue(valueSets.containsKey("loinc-all"));
	vs = valueSets.get("loinc-all");
	assertEquals("http://loinc.org/vs", vs.getUrl());
	assertEquals("1.0.0", vs.getVersion());
	assertEquals("All LOINC codes", vs.getName());
	assertEquals(Enumerations.PublicationStatus.ACTIVE, vs.getStatus());
	assertTrue(vs.hasDate());
	assertEquals("Regenstrief Institute, Inc.", vs.getPublisher());
	assertEquals("A value set that includes all LOINC codes", vs.getDescription());
	assertEquals("This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/", vs.getCopyright());
	assertTrue(vs.hasCompose());
	assertTrue(vs.getCompose().hasInclude());
	assertEquals(1, vs.getCompose().getInclude().size());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, vs.getCompose().getInclude().get(0).getSystem());

	// IEEE Medical Device Codes
	conceptMap = conceptMaps.get(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_ID);
	ourLog.debug(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
@ -322,8 +342,8 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {

@Test
public void testLoadLoincMissingMandatoryFiles() throws IOException {
	myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
	myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
	myFiles.addFileZip("/loinc/", LOINC_UPLOAD_PROPERTIES_FILE.getCode());
	myFiles.addFileZip("/loinc/", LOINC_GROUP_FILE_DEFAULT.getCode());

	// Actually do the load
	try {
@ -336,26 +356,129 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {
	}
}

public static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_FILE, TerminologyLoaderSvcImpl.LOINC_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_FILE, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
	theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
	theFiles.addFileZip("/loinc/", LOINC_UPLOAD_PROPERTIES_FILE.getCode());
	theFiles.addFileZip("/loinc/", LOINC_GROUP_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_GROUP_TERMS_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_PARENT_GROUP_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_FILE_DEFAULT.getCode(), LOINC_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_HIERARCHY_FILE_DEFAULT.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_ANSWERLIST_FILE_DEFAULT.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_PART_FILE_DEFAULT.getCode(), LOINC_PART_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_PART_LINK_FILE_DEFAULT.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode());
	theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode());
}

@Test
public void testLoadLoincMultiaxialHierarchySupport() throws Exception {
	addLoincMandatoryFilesToZip(myFiles);

	// Actually do the load
	mySvc.loadLoinc(myFiles.getFiles(), mySrd);

	verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
	Map<String, TermConcept> concepts = extractConcepts();
	Map<String, ValueSet> valueSets = extractValueSets();
	Map<String, ConceptMap> conceptMaps = extractConceptMaps();

	ConceptMap conceptMap;
	TermConcept code;
	ValueSet vs;
	ConceptMap.ConceptMapGroupComponent group;

	// Normal loinc code
	code = concepts.get("10013-1");
	assertEquals("10013-1", code.getCode());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, code.getCodingProperties("PROPERTY").get(0).getSystem());
	assertEquals("LP6802-5", code.getCodingProperties("PROPERTY").get(0).getCode());
	assertEquals("Elpot", code.getCodingProperties("PROPERTY").get(0).getDisplay());
	assertEquals("EKG.MEAS", code.getStringProperty("CLASS"));
	assertEquals("R' wave amplitude in lead I", code.getDisplay());

	// Codes with parent and child properties
	code = concepts.get("LP31755-9");
	assertEquals("LP31755-9", code.getCode());
	List<TermConceptProperty> properties = new ArrayList<>(code.getProperties());
	assertEquals(1, properties.size());
	assertEquals("child", properties.get(0).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem());
	assertEquals("LP14559-6", properties.get(0).getValue());
	assertEquals("Microorganism", properties.get(0).getDisplay());
	assertEquals(0, code.getParents().size());
	assertEquals(1, code.getChildren().size());

	TermConcept childCode = code.getChildren().get(0).getChild();
	assertEquals("LP14559-6", childCode.getCode());
	assertEquals("Microorganism", childCode.getDisplay());

	properties = new ArrayList<>(childCode.getProperties());
	assertEquals(2, properties.size());
	assertEquals("parent", properties.get(0).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem());
	assertEquals(code.getCode(), properties.get(0).getValue());
	assertEquals(code.getDisplay(), properties.get(0).getDisplay());
	assertEquals("child", properties.get(1).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem());
	assertEquals("LP98185-9", properties.get(1).getValue());
	assertEquals("Bacteria", properties.get(1).getDisplay());
	assertEquals(1, childCode.getParents().size());
	assertEquals(1, childCode.getChildren().size());
	assertEquals(code.getCode(), new ArrayList<>(childCode.getParents()).get(0).getParent().getCode());

	TermConcept nestedChildCode = childCode.getChildren().get(0).getChild();
	assertEquals("LP98185-9", nestedChildCode.getCode());
	assertEquals("Bacteria", nestedChildCode.getDisplay());

	properties = new ArrayList<>(nestedChildCode.getProperties());
	assertEquals(2, properties.size());
	assertEquals("parent", properties.get(0).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem());
	assertEquals(childCode.getCode(), properties.get(0).getValue());
	assertEquals(childCode.getDisplay(), properties.get(0).getDisplay());
	assertEquals("child", properties.get(1).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem());
	assertEquals("LP14082-9", properties.get(1).getValue());
	assertEquals("Bacteria", properties.get(1).getDisplay());
	assertEquals(1, nestedChildCode.getParents().size());
	assertEquals(1, nestedChildCode.getChildren().size());
	assertEquals(childCode.getCode(), new ArrayList<>(nestedChildCode.getParents()).get(0).getParent().getCode());

	TermConcept doublyNestedChildCode = nestedChildCode.getChildren().get(0).getChild();
	assertEquals("LP14082-9", doublyNestedChildCode.getCode());
	assertEquals("Bacteria", doublyNestedChildCode.getDisplay());

	properties = new ArrayList<>(doublyNestedChildCode.getProperties());
	assertEquals(4, properties.size());
	assertEquals("parent", properties.get(0).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem());
	assertEquals(nestedChildCode.getCode(), properties.get(0).getValue());
	assertEquals(nestedChildCode.getDisplay(), properties.get(0).getDisplay());
	assertEquals("child", properties.get(1).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem());
	assertEquals("LP52258-8", properties.get(1).getValue());
	assertEquals("Bacteria | Body Fluid", properties.get(1).getDisplay());
	assertEquals("child", properties.get(2).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(2).getCodeSystem());
	assertEquals("LP52260-4", properties.get(2).getValue());
	assertEquals("Bacteria | Cerebral spinal fluid", properties.get(2).getDisplay());
	assertEquals("child", properties.get(3).getKey());
	assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(3).getCodeSystem());
	assertEquals("LP52960-9", properties.get(3).getValue());
	assertEquals("Bacteria | Cervix", properties.get(3).getDisplay());
	assertEquals(1, doublyNestedChildCode.getParents().size());
	assertEquals(3, doublyNestedChildCode.getChildren().size());
	assertEquals(nestedChildCode.getCode(), new ArrayList<>(doublyNestedChildCode.getParents()).get(0).getParent().getCode());
	assertEquals("LP52258-8", doublyNestedChildCode.getChildren().get(0).getChild().getCode());
	assertEquals("LP52260-4", doublyNestedChildCode.getChildren().get(1).getChild().getCode());
	assertEquals("LP52960-9", doublyNestedChildCode.getChildren().get(2).getChild().getCode());
}

@AfterClass
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -13,7 +14,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence;
import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome;
@ -562,6 +563,40 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {

}

@Test
public void testDeleteValueSet() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental());
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	Long termValueSetId = myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).get().getId();
	assertEquals(3, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue());
	assertEquals(24, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue());

	new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
		@Override
		protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
			myTermValueSetConceptDesignationDao.deleteByTermValueSetId(termValueSetId);
			assertEquals(0, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue());
			myTermValueSetConceptDao.deleteByTermValueSetId(termValueSetId);
			assertEquals(0, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue());
			myTermValueSetDao.deleteByTermValueSetId(termValueSetId);
			assertFalse(myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).isPresent());
		}
	});
}

@Test
public void testDuplicateCodeSystemUrls() throws Exception {
	loadAndPersistCodeSystem();
@ -595,6 +630,294 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
	loadAndPersistValueSet();
}

@Test
public void testExpandTermValueSetAndChildren() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental());
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
	assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset());
	assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
	assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
	assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
	assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
	assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size());

	ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8450-9", containsComponent.getCode());
	assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay());
	assertEquals(2, containsComponent.getDesignation().size());

	ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue());

	designationComponent = containsComponent.getDesignation().get(1);
	assertEquals("sv", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue());

	containsComponent = expandedValueSet.getExpansion().getContains().get(1);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("11378-7", containsComponent.getCode());
	assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	// ...

	containsComponent = expandedValueSet.getExpansion().getContains().get(22);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8491-3", containsComponent.getCode());
	assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
	assertEquals(1, containsComponent.getDesignation().size());

	designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());

	containsComponent = expandedValueSet.getExpansion().getContains().get(23);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8492-1", containsComponent.getCode());
	assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());
}

@Test
public void testExpandTermValueSetAndChildrenWithCount() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 23);
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
	assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset());
	assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
	assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
	assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
	assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
	assertEquals(23, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());

	assertEquals(23, expandedValueSet.getExpansion().getContains().size());

	ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8450-9", containsComponent.getCode());
	assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay());
	assertEquals(2, containsComponent.getDesignation().size());

	ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue());

	designationComponent = containsComponent.getDesignation().get(1);
	assertEquals("sv", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue());

	containsComponent = expandedValueSet.getExpansion().getContains().get(1);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("11378-7", containsComponent.getCode());
	assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	// ...

	containsComponent = expandedValueSet.getExpansion().getContains().get(22);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8491-3", containsComponent.getCode());
	assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
	assertEquals(1, containsComponent.getDesignation().size());

	designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());
}

@Test
public void testExpandTermValueSetAndChildrenWithCountOfZero() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 0);
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
	assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset());
	assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
	assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
	assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
	assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
	assertEquals(0, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());

	assertFalse(expandedValueSet.getExpansion().hasContains());
}

@Test
public void testExpandTermValueSetAndChildrenWithOffset() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, myDaoConfig.getPreExpandValueSetsDefaultCountExperimental());
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
	assertEquals(1, expandedValueSet.getExpansion().getOffset());
	assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
	assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
	assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
	assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
	assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());

	assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size());

	ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("11378-7", containsComponent.getCode());
	assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	containsComponent = expandedValueSet.getExpansion().getContains().get(1);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8493-9", containsComponent.getCode());
	assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	// ...

	containsComponent = expandedValueSet.getExpansion().getContains().get(21);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8491-3", containsComponent.getCode());
	assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
	assertEquals(1, containsComponent.getDesignation().size());

	ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());

	containsComponent = expandedValueSet.getExpansion().getContains().get(22);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8492-1", containsComponent.getCode());
	assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());
}

@Test
public void testExpandTermValueSetAndChildrenWithOffsetAndCount() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, 22);
	ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));

	assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
	assertEquals(1, expandedValueSet.getExpansion().getOffset());
	assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
	assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
	assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
	assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
	assertEquals(22, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());

	assertEquals(22, expandedValueSet.getExpansion().getContains().size());

	ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("11378-7", containsComponent.getCode());
	assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	containsComponent = expandedValueSet.getExpansion().getContains().get(1);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8493-9", containsComponent.getCode());
	assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay());
	assertFalse(containsComponent.hasDesignation());

	// ...

	containsComponent = expandedValueSet.getExpansion().getContains().get(21);
	assertEquals("http://acme.org", containsComponent.getSystem());
	assertEquals("8491-3", containsComponent.getCode());
	assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
	assertEquals(1, containsComponent.getDesignation().size());

	ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
	assertEquals("nl", designationComponent.getLanguage());
	assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
	assertEquals("900000000000013009", designationComponent.getUse().getCode());
	assertEquals("Synonym", designationComponent.getUse().getDisplay());
	assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());
}

@Test
public void testExpandValueSetWithValueSetCodeAccumulator() {
	createCodeSystem();
@ -607,17 +930,6 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
	verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection());
}

@Test
public void testValidateCode() {
	createCodeSystem();

	IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null);
	assertEquals(true, validation.isOk());

	validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null);
	assertEquals(false, validation.isOk());
}

@Test
public void testStoreTermCodeSystemAndChildren() throws Exception {
	loadAndPersistCodeSystemWithDesignations();
@ -985,7 +1297,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
		assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
		assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
		assertEquals(0, termValueSet.getConcepts().size());
		assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
		assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
	});

	myTermSvc.preExpandValueSetToTerminologyTables();
@ -1003,7 +1315,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
		assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
		assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
		assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size());
		assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
		assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());

		TermValueSetConcept concept = termValueSet.getConcepts().get(0);
		ourLog.info("Code:\n" + concept.toString());
@ -1083,7 +1395,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
		assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
		assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
		assertEquals(0, termValueSet.getConcepts().size());
		assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
		assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
	});

	myTermSvc.preExpandValueSetToTerminologyTables();
@ -1101,7 +1413,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
		assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
		assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
		assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size());
		assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
		assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());

		TermValueSetConcept concept = termValueSet.getConcepts().get(0);
		ourLog.info("Code:\n" + concept.toString());
@ -2291,6 +2603,67 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
	});
}

@Test
public void testValidateCode() {
	createCodeSystem();

	CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null);
	assertEquals(true, validation.isOk());

	validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null);
	assertEquals(false, validation.isOk());
}

@Test
public void testValidateCodeIsInPreExpandedValueSet() throws Exception {
	myDaoConfig.setPreExpandValueSetsExperimental(true);

	loadAndPersistCodeSystemAndValueSetWithDesignations();

	CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
	ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));

	ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
	ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));

	myTermSvc.preExpandValueSetToTerminologyTables();

	ValidateCodeResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, null, null);
	assertNull(result);

	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "BOGUS", null, null, null);
	assertNull(result);

	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "11378-7", null, null, null);
	assertTrue(result.isResult());
	assertEquals("Validation succeeded", result.getMessage());
	assertEquals("Systolic blood pressure at First encounter", result.getDisplay());

	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null);
	assertTrue(result.isResult());
	assertEquals("Validation succeeded", result.getMessage());
	assertEquals("Systolic blood pressure at First encounter", result.getDisplay());

	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, "http://acme.org", "11378-7", null, null, null);
	assertTrue(result.isResult());
	assertEquals("Validation succeeded", result.getMessage());
	assertEquals("Systolic blood pressure at First encounter", result.getDisplay());

	Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter");
	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, coding, null);
	assertTrue(result.isResult());
	assertEquals("Validation succeeded", result.getMessage());
	assertEquals("Systolic blood pressure at First encounter", result.getDisplay());

	CodeableConcept codeableConcept = new CodeableConcept();
	codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS"));
	codeableConcept.addCoding(coding);
	result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, null, codeableConcept);
	assertTrue(result.isResult());
	assertEquals("Validation succeeded", result.getMessage());
	assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
}

@AfterClass
public static void afterClassClearContext() {
	TestUtil.clearAllStaticFieldsForUnitTest();
@ -1,7 +1,95 @@
#################
### MANDATORY ###
#################

# This is the version identifier for the AnswerList file
answerlist.version=Beta.1
# Answer lists (ValueSets of potential answers/values for LOINC "questions")
## Default value if key not provided: AnswerList.csv
## File must be present
loinc.answerlist.file=AnswerList.csv
# Answer list links (connects LOINC observation codes to answer list codes)
## Default value if key not provided: LoincAnswerListLink.csv
## File must be present
loinc.answerlist.link.file=LoincAnswerListLink.csv

# Document ontology
## Default value if key not provided: DocumentOntology.csv
## File must be present
loinc.document.ontology.file=DocumentOntology.csv

# LOINC codes
## Default value if key not provided: LoincTable/Loinc.csv
## File must be present
loinc.file=LoincTable/Loinc.csv

# LOINC hierarchy
## Default value if key not provided: MultiAxialHierarchy.csv
## File must be present
loinc.hierarchy.file=MultiAxialHierarchy.csv

# IEEE medical device codes
## Default value if key not provided: LoincIeeeMedicalDeviceCodeMappingTable.csv
## File must be present
loinc.ieee.medical.device.code.mapping.table.file=LoincIeeeMedicalDeviceCodeMappingTable.csv

# Imaging document codes
## Default value if key not provided: ImagingDocumentCodes.csv
## File must be present
loinc.imaging.document.codes.file=ImagingDocumentCodes.csv

# Part
## Default value if key not provided: Part.csv
## File must be present
loinc.part.file=Part.csv
# Part link
## Default value if key not provided: LoincPartLink.csv
## File must be present
loinc.part.link.file=LoincPartLink.csv
# Part related code mapping
## Default value if key not provided: PartRelatedCodeMapping.csv
## File must be present
loinc.part.related.code.mapping.file=PartRelatedCodeMapping.csv

# RSNA playbook
## Default value if key not provided: LoincRsnaRadiologyPlaybook.csv
## File must be present
loinc.rsna.playbook.file=LoincRsnaRadiologyPlaybook.csv

# Top 2000 codes - SI
## Default value if key not provided: Top2000CommonLabResultsSi.csv
## File must be present
loinc.top2000.common.lab.results.si.file=Top2000CommonLabResultsSi.csv
# Top 2000 codes - US
## Default value if key not provided: Top2000CommonLabResultsUs.csv
## File must be present
loinc.top2000.common.lab.results.us.file=Top2000CommonLabResultsUs.csv

# Universal lab order ValueSet
## Default value if key not provided: LoincUniversalLabOrdersValueSet.csv
## File must be present
loinc.universal.lab.order.valueset.file=LoincUniversalLabOrdersValueSet.csv

################
### OPTIONAL ###
################

# This is the version identifier for the answer list file
## Key may be omitted
loinc.answerlist.version=Beta.1

# This is the version identifier for uploaded ConceptMap resources
conceptmap.version=Beta.1
## Key may be omitted
loinc.conceptmap.version=Beta.1

# Group
## Default value if key not provided: Group.csv
## File may be omitted
loinc.group.file=Group.csv
# Group terms
## Default value if key not provided: GroupLoincTerms.csv
## File may be omitted
loinc.group.terms.file=GroupLoincTerms.csv

# Parent group
## Default value if key not provided: ParentGroup.csv
## File may be omitted
loinc.parent.group.file=ParentGroup.csv
@ -79,6 +79,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
	resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
	resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");

	// TermValueSetConceptDesignation
	version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");
	Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION");
	termValueSetConceptDesignationTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
	termValueSetConceptDesignationTable
		.addForeignKey("FK_TRM_VSCD_VS_PID")
		.toColumn("VALUESET_PID")
		.references("TRM_VALUESET", "PID");
	// Drop HFJ_SEARCH_RESULT foreign keys
	version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_RES");
	version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_SEARCH");
@ -216,6 +216,11 @@
			<version>${project.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.awaitility</groupId>
			<artifactId>awaitility</artifactId>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
@ -802,7 +802,6 @@ public class XmlParserDstu2Test {

	//@formatter:off
	assertThat(enc, stringContainsInOrder("<Patient xmlns=\"http://hl7.org/fhir\">",
		"<meta>",
		"<meta>",
		"<profile value=\"http://foo/Profile1\"/>",
		"<profile value=\"http://foo/Profile2\"/>",
@ -817,7 +816,6 @@ public class XmlParserDstu2Test {
		"<display value=\"label2\"/>",
		"</tag>",
		"</meta>",
		"</meta>",
		"<name>",
		"<family value=\"FAMILY\"/>",
		"</name>",
@ -856,7 +854,6 @@ public class XmlParserDstu2Test {

	//@formatter:off
	assertThat(enc, stringContainsInOrder("<Patient xmlns=\"http://hl7.org/fhir\">",
		"<meta>",
		"<meta>",
		"<tag>",
		"<system value=\"scheme1\"/>",
@ -869,7 +866,6 @@ public class XmlParserDstu2Test {
		"<display value=\"label2\"/>",
		"</tag>",
		"</meta>",
		"</meta>",
		"<name>",
		"<family value=\"FAMILY\"/>",
		"</name>",
@ -46,11 +46,6 @@
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
		</dependency>

		<dependency>
			<groupId>org.awaitility</groupId>
			<artifactId>awaitility</artifactId>
		</dependency>
	</dependencies>

	<build>
pom.xml
@ -1062,21 +1062,7 @@
		<dependency>
			<groupId>org.awaitility</groupId>
			<artifactId>awaitility</artifactId>
			<version>3.1.6</version>
			<exclusions>
				<exclusion>
					<groupId>org.hamcrest</groupId>
					<artifactId>java-hamcrest</artifactId>
				</exclusion>
				<exclusion>
					<groupId>org.hamcrest</groupId>
					<artifactId>hamcrest-core</artifactId>
				</exclusion>
				<exclusion>
					<groupId>org.hamcrest</groupId>
					<artifactId>hamcrest-library</artifactId>
				</exclusion>
			</exclusions>
			<version>4.0.0-rc1</version>
		</dependency>
		<dependency>
			<groupId>org.codehaus.plexus</groupId>
@ -77,6 +77,26 @@
	Two foreign keys have been dropped from the HFJ_SEARCH_RESULT table used by the FHIR search query cache. These
	constraints did not add value and caused unnecessary contention when used under high load.
</action>
<action type="add">
	It is now possible to submit a PATCH request as a part of a FHIR transaction in DSTU3 (previously this
	was only supported in R4+). This is not officially part of the DSTU3 spec, but it can now be performed by
	leaving the Bundle.entry.request.method blank in DSTU3 transactions and setting the request payload
	as a Binary resource containing a valid patch.
</action>
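For illustration, a minimal sketch of such a transaction entry, mirroring the test earlier in this commit (pid1 is the id of an existing Patient in that test; everything else comes straight from the test code):

	String patchString = "[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]";

	Binary patch = new Binary();
	patch.setContentType(ca.uhn.fhir.rest.api.Constants.CT_JSON_PATCH);
	patch.setContent(patchString.getBytes(Charsets.UTF_8));

	// Bundle.entry.request.method is deliberately left unset;
	// the Binary payload carries the JSON patch for the target resource
	Bundle input = new Bundle();
	input.setType(Bundle.BundleType.TRANSACTION);
	input.addEntry()
		.setFullUrl(pid1.getValue())
		.setResource(patch)
		.getRequest().setUrl(pid1.getValue());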
<action type="change" issue="1366">
	The HAPI FHIR CLI server now uses H2 as its database platform instead of Derby.
	Note that this means that data in any existing installations will need to be
	re-uploaded to the new database platform.
</action>
<action type="add" issue="1443">
	LOINC concepts now include multiaxial hierarchical properties (e.g. <![CDATA[<code>parent</code>]]> and
	<![CDATA[<code>child</code>]]>), which identify parent and child concepts.
</action>
<action type="add" issue="1445">
	When loading LOINC terminology, a new ValueSet is automatically created with a single include element that
	identifies the LOINC CodeSystem in <![CDATA[<code>ValueSet.compose.include.system</code>]]>. This ValueSet
	includes all LOINC codes.
</action>
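For illustration, a minimal sketch of the shape of that generated ValueSet, based on the assertions in TerminologyLoaderSvcLoincTest above (the loader builds this automatically; constructing it by hand here is purely hypothetical):

	ValueSet allLoinc = new ValueSet();
	allLoinc.setUrl("http://loinc.org/vs");
	allLoinc.setName("All LOINC codes");
	allLoinc.setStatus(Enumerations.PublicationStatus.ACTIVE);
	// a single include element identifying the LOINC code system
	allLoinc.getCompose().addInclude().setSystem(IHapiTerminologyLoaderSvc.LOINC_URI);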
</release>
<release version="4.0.0" date="2019-08-14" description="Igloo">
	<action type="add">