diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java index 4dd9ed42b22..2826f6db314 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java @@ -76,7 +76,7 @@ public class ValidateUtil { } public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage, Object... theValues) { - if (theSuccess == false) { + if (!theSuccess) { throw new InvalidRequestException(String.format(theMessage, theValues)); } } diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index ed76533ec3b..8d9373ef09b 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -17,8 +17,8 @@ ca.uhn.fhir.rest.client.impl.GenericClient.noPagingLinkFoundInBundle=Can not per ca.uhn.fhir.rest.client.impl.GenericClient.noVersionIdForVread=No version specified in URL for 'vread' operation: {0} ca.uhn.fhir.rest.client.impl.GenericClient.incompleteUriForRead=The given URI is not an absolute URL and is not usable for this operation: {0} ca.uhn.fhir.rest.client.impl.GenericClient.cannotDetermineResourceTypeFromUri=Unable to determine the resource type from the given URI: {0} -ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. URL used was {0} -ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext). +ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. URL used was {0} +ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext). ca.uhn.fhir.rest.server.interceptor.auth.RuleImplOp.invalidRequestBundleTypeForTransaction=Invalid request Bundle.type value for transaction: {0} @@ -46,14 +46,14 @@ ca.uhn.fhir.rest.param.BaseParam.chainNotSupported=Invalid search parameter "{0} ca.uhn.fhir.rest.server.method.ResourceParameter.invalidContentTypeInRequest=Incorrect Content-Type header value of "{0}" was provided in the request. A FHIR Content-Type is required for "{1}" operation ca.uhn.fhir.rest.server.method.ResourceParameter.noContentTypeInRequest=No Content-Type header was provided in the request. This is required for "{0}" operation -ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. Error was: {1} +ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. 
Error was: {1} ca.uhn.fhir.parser.ParserState.wrongResourceTypeFound=Incorrect resource type found, expected "{0}" but found "{1}" ca.uhn.fhir.rest.server.RestfulServer.getPagesNonHttpGet=Requests for _getpages must use HTTP GET ca.uhn.fhir.rest.server.RestfulServer.unknownMethod=Invalid request: The FHIR endpoint on this server does not know how to handle {0} operation[{1}] with parameters [{2}] ca.uhn.fhir.rest.server.RestfulServer.rootRequest=This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name. ca.uhn.fhir.rest.server.RestfulServer.rootRequest.multitenant=This is the base URL of a multitenant FHIR server. Unable to handle this request, as it does not contain a tenant ID. -ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body? +ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body? ca.uhn.fhir.validation.FhirValidator.noPhWarningOnStartup=Ph-schematron library not found on classpath, will not attempt to perform schematron validation ca.uhn.fhir.validation.FhirValidator.noPhError=Ph-schematron library not found on classpath, can not enable perform schematron validation ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected during validation @@ -62,22 +62,22 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request. -ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. -ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. +ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. +ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. 
This is only valid as a response operation, not in a request ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}" ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources -ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided -ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1} +ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided +ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1} ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate index matching query: {1} (existing index belongs to {2}) ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0} -ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}" -ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided. -ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1} +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}" +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided. +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1} ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\") ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists @@ -97,8 +97,8 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully delet ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}". 
Value search parameters for this search are: {1} ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0} -ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation -ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request +ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation +ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1} @@ -118,8 +118,8 @@ ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascadi ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.noAttachmentDataPresent=The resource with ID {0} has no data at path: {1} -ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted. -ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1} +ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted. +ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1} ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2} @@ -127,5 +127,11 @@ ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemU ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1} ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1} ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted! +ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand=ValueSet is not ready for operation $expand; current status: {0} | {1} ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1} + +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.failedToExpand=The ValueSet has been picked up by a scheduled task and pre-expansion has failed. 
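The status messages above back the experimental ValueSet pre-expansion feature; the DaoConfig settings and the paged $expand offset/count parameters that use them are introduced in the changes below. A minimal usage sketch (the base URL and ValueSet URL are placeholders, and setPreExpandValueSetsExperimental is assumed to be the pre-existing experimental toggle whose getter the new Javadoc references):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.ValueSet;

public class PreExpansionSketch {
	public static void main(String[] args) {
		// Server side: enable experimental pre-expansion and bound the $expand page size.
		DaoConfig daoConfig = new DaoConfig();
		daoConfig.setPreExpandValueSetsExperimental(true);            // assumed pre-existing toggle
		daoConfig.setPreExpandValueSetsMaxCountExperimental(1000);    // setter added in this change
		daoConfig.setPreExpandValueSetsDefaultCountExperimental(500); // setter added in this change

		// Client side: page through a pre-expanded ValueSet with the new offset/count parameters.
		IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder endpoint
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("url").setValue(new UriType("http://example.org/fhir/ValueSet/example")); // placeholder ValueSet
		inParams.addParameter().setName("offset").setValue(new IntegerType(0));
		inParams.addParameter().setName("count").setValue(new IntegerType(100));
		ValueSet page = client.operation()
			.onType(ValueSet.class)
			.named("$expand")
			.withParameters(inParams)
			.returnResourceType(ValueSet.class)
			.execute();
		System.out.println(page.getExpansion().getContains().size() + " codes in this page");
	}
}

As the resource providers below show, a client-supplied count larger than getPreExpandValueSetsMaxCountExperimental() is clamped to the maximum rather than rejected.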
diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 31f8770e279..5715d52ddca 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -90,28 +90,12 @@ commons-cli - - org.apache.derby - derby + com.h2database + h2 - - org.apache.derby - derbynet - - - org.apache.derby - derbyclient - - diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java index 47d81a02566..a2ceab17980 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java @@ -1,17 +1,16 @@ package ca.uhn.fhir.jpa.demo; -import java.util.Properties; - import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; -import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; +import org.hibernate.dialect.H2Dialect; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import javax.sql.DataSource; +import java.util.Properties; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -38,20 +37,20 @@ public class CommonConfig { } /** - * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a - * directory called "jpaserver_derby_files". + * The following bean configures the database connection. The 'url' property value of "jdbc:h2:file:target./jpaserver_h2_files" indicates that the server should save resources in a + * directory called "jpaserver_h2_files". * * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource. 
*/ @Bean(destroyMethod = "close") public DataSource dataSource() { - String url = "jdbc:derby:directory:target/jpaserver_derby_files;create=true"; + String url = "jdbc:h2:file:./target/jpaserver_h2_files"; if (isNotBlank(ContextHolder.getDatabaseUrl())) { url = ContextHolder.getDatabaseUrl(); } BasicDataSource retVal = new BasicDataSource(); - retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver()); + retVal.setDriver(new org.h2.Driver()); retVal.setUrl(url); retVal.setUsername(""); retVal.setPassword(""); @@ -61,7 +60,7 @@ public class CommonConfig { @Bean public Properties jpaProperties() { Properties extraProperties = new Properties(); - extraProperties.put("hibernate.dialect", DerbyTenSevenHapiFhirDialect.class.getName()); + extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); extraProperties.put("hibernate.format_sql", "true"); extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java index 18ae898014a..7a7f588a72f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java @@ -152,6 +152,18 @@ public class DaoConfig { private boolean myPreExpandValueSetsExperimental = false; private boolean myFilterParameterEnabled = false; private StoreMetaSourceInformation myStoreMetaSourceInformation = StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}! + */ + private int myPreExpandValueSetsDefaultOffsetExperimental = 0; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}! + */ + private int myPreExpandValueSetsDefaultCountExperimental = 1000; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}! + */ + private int myPreExpandValueSetsMaxCountExperimental = 1000; /** * Constructor @@ -1707,6 +1719,86 @@ public class DaoConfig { } } + /** + * EXPERIMENTAL - Do not use in production! + *
<p>
+ * This is the default value of the {@code offset} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *
</p>
+ *
<p>
+ * The default value for this setting is {@code 0}. + *
</p>
+ */ + public int getPreExpandValueSetsDefaultOffsetExperimental() { + return myPreExpandValueSetsDefaultOffsetExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *
<p>
+ * This is the default value of the {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *
</p>
+ *
<p>
+ * The default value for this setting is {@code 1000}. + *
</p>
+ */ + public int getPreExpandValueSetsDefaultCountExperimental() { + return myPreExpandValueSetsDefaultCountExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *
<p>
+ * This is the default value of the {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *
</p>
+ *
<p>
+ * If {@code thePreExpandValueSetsDefaultCountExperimental} is greater than + * {@link DaoConfig#getPreExpandValueSetsMaxCountExperimental()}, the lesser value is used. + *
</p>
+ *
<p>
+ * The default value for this setting is {@code 1000}. + *
</p>
+ */ + public void setPreExpandValueSetsDefaultCountExperimental(int thePreExpandValueSetsDefaultCountExperimental) { + myPreExpandValueSetsDefaultCountExperimental = Math.min(thePreExpandValueSetsDefaultCountExperimental, getPreExpandValueSetsMaxCountExperimental()); + } + + /** + * EXPERIMENTAL - Do not use in production! + *
<p>
+ * This is the maximum value of the {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *
</p>
+ *
<p>
+ * The default value for this setting is {@code 1000}. + *
</p>
+ */ + public int getPreExpandValueSetsMaxCountExperimental() { + return myPreExpandValueSetsMaxCountExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *
<p>
+ * This is the maximum value of the {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *
</p>
+ *
<p>
+ * If {@code thePreExpandValueSetsMaxCountExperimental} is less than + * {@link DaoConfig#getPreExpandValueSetsDefaultCountExperimental()}, the default {@code count} is lowered to the + * new maximum {@code count}. + *
</p>
+ *
<p>
+ * The default value for this setting is {@code 1000}. + *
</p>
+ */ + public void setPreExpandValueSetsMaxCountExperimental(int thePreExpandValueSetsMaxCountExperimental) { + myPreExpandValueSetsMaxCountExperimental = thePreExpandValueSetsMaxCountExperimental; + setPreExpandValueSetsDefaultCountExperimental(Math.min(getPreExpandValueSetsDefaultCountExperimental(), getPreExpandValueSetsMaxCountExperimental())); + } + public enum IndexEnabledEnum { ENABLED, DISABLED diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java index 45148a1d9d5..1db5197f8f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java @@ -20,15 +20,25 @@ package ca.uhn.fhir.jpa.dao; * #L% */ -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.util.*; - -import javax.annotation.PostConstruct; - +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IContextValidationSupport; +import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; +import ca.uhn.fhir.model.dstu2.composite.CodingDt; +import ca.uhn.fhir.model.dstu2.resource.ValueSet; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeInclude; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeIncludeConcept; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ExpansionContains; +import ca.uhn.fhir.model.primitive.DateTimeDt; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.param.UriParam; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.apache.commons.codec.binary.StringUtils; import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport; import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport; @@ -38,20 +48,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.model.entity.BaseHasResource; -import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; -import ca.uhn.fhir.model.dstu2.composite.CodingDt; -import ca.uhn.fhir.model.dstu2.resource.ValueSet; -import ca.uhn.fhir.model.dstu2.resource.ValueSet.*; -import ca.uhn.fhir.model.primitive.DateTimeDt; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.server.IBundleProvider; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.TokenParam; -import ca.uhn.fhir.rest.param.UriParam; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static 
org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 implements IFhirResourceDaoValueSet, IFhirResourceDaoCodeSystem { @@ -95,7 +99,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 public ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequest) { ValueSet source = loadValueSetForExpansion(theId, theRequest); return expand(source, theFilter); + } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequest) { + throw new UnsupportedOperationException(); } @Override @@ -131,6 +139,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 return retVal; } + @Override + public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + throw new UnsupportedOperationException(); + } + @Override public ValueSet expandByIdentifier(String theUri, String theFilter) { if (isBlank(theUri)) { @@ -153,7 +166,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 } return expand(source, theFilter); + } + @Override + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + throw new UnsupportedOperationException(); } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java index 1659463242f..80aff0ed58c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java @@ -27,10 +27,16 @@ public interface IFhirResourceDaoValueSet exten T expand(IIdType theId, String theFilter, RequestDetails theRequestDetails); + T expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails); + T expand(T theSource, String theFilter); + T expand(T theSource, String theFilter, int theOffset, int theCount); + T expandByIdentifier(String theUri, String theFilter); + T expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount); + void purgeCaches(); ValidateCodeResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, IPrimitiveType theSystem, IPrimitiveType theDisplay, CD theCoding, CC theCodeableConcept, RequestDetails theRequestDetails); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java index 9577c47b842..e225719ed4e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSetConcept; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -30,11 +32,17 @@ import java.util.Optional; public interface ITermValueSetConceptDao extends JpaRepository { + @Query("SELECT COUNT(*) FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") + Integer 
countByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("DELETE FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") @Modifying void deleteByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("SELECT vsc from TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") + Slice findByTermValueSetId(Pageable thePage, @Param("pid") Long theValueSetId); + @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") - Optional findByValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); + Optional findByTermValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java index 1a0875b5a2f..1792b8496e0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -28,8 +30,13 @@ import org.springframework.data.repository.query.Param; public interface ITermValueSetConceptDesignationDao extends JpaRepository { - @Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myValueSet.myId = :pid") + @Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid") + Integer countByTermValueSetId(@Param("pid") Long theValueSetId); + + @Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid") @Modifying void deleteByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("SELECT vscd FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid") + Slice findByTermValueSetConceptId(Pageable thePage, @Param("pid") Long theValueSetConceptId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java index d04cf883041..86053789288 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSet; -import ca.uhn.fhir.jpa.entity.TermValueSetExpansionStatusEnum; -import org.springframework.data.domain.Page; +import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -44,6 +44,6 @@ public interface ITermValueSetDao extends JpaRepository { Optional findByUrl(@Param("url") 
String theUrl); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status") - Page findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus); + Slice findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index 282b1ab5a19..7c8895b06dc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -75,6 +75,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { validateIncludes("include", theSource.getCompose().getInclude()); @@ -105,7 +111,38 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 ValueSet retVal = outcome.getValueset(); retVal.setStatus(PublicationStatus.ACTIVE); return retVal; + } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + + validateIncludes("include", theSource.getCompose().getInclude()); + validateIncludes("exclude", theSource.getCompose().getExclude()); + + /* + * If all of the code systems are supported by the HAPI FHIR terminology service, let's + * use that as it's more efficient. 
+ */ + + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + ValueSet retVal = outcome.getValueset(); + retVal.setStatus(PublicationStatus.ACTIVE); + return retVal; } private void validateIncludes(String name, List listToValidate) { @@ -148,20 +185,42 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 // } // // return expand(defaultValueSet, theFilter); - } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + source.setUrl(theUri); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; + } + + @Override + public ValueSet expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -170,7 +229,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -179,7 +238,32 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } return retVal; + } + @Override + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); + + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + 
toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -246,9 +330,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. See #1431 List contains = expansion.getExpansion().getContains(); - ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) { @@ -269,7 +352,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List contains, String theSystem, String theCode, - Coding theCoding, CodeableConcept theCodeableConcept) { + Coding theCoding, CodeableConcept theCodeableConcept) { for (ValueSetExpansionContainsComponent nextCode : contains) { ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java index 0663d8cd3f7..6a525e07c0b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { /* @@ -109,6 +115,32 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple // return retVal; } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return myTerminologySvc.expandValueSet(theSource, theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + + ValueSet retVal = outcome.getValueset(); + 
retVal.setStatus(PublicationStatus.ACTIVE); + + return retVal; + } + private void validateIncludes(String name, List listToValidate) { for (ConceptSetComponent nextExclude : listToValidate) { if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) { @@ -149,20 +181,42 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple // } // // return expand(defaultValueSet, theFilter); - } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + source.setUrl(theUri); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; + } + + @Override + public ValueSet expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -171,7 +225,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -180,7 +234,32 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } return retVal; + } + @Override + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); + + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -247,7 +326,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology 
tables. See #1431 List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index 12be4a64a15..855c8d43bcf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { /* @@ -109,6 +115,38 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple // return retVal; } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + + /* + * If all of the code systems are supported by the HAPI FHIR terminology service, let's + * use that as it's more efficient. + */ + + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + + ValueSet retVal = outcome.getValueset(); + retVal.setStatus(PublicationStatus.ACTIVE); + + return retVal; + } + private void validateIncludes(String name, List listToValidate) { for (ConceptSetComponent nextExclude : listToValidate) { if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) { @@ -149,20 +187,42 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple // } // // return expand(defaultValueSet, theFilter); - } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + source.setUrl(theUri); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; + } + + @Override + public ValueSet 
expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -171,7 +231,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -180,7 +240,32 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } return retVal; + } + @Override + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); + + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -247,7 +332,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java index 28a60a1fef8..0ba562efe79 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java @@ -71,11 +71,11 @@ public class TermValueSet implements Serializable { @Enumerated(EnumType.STRING) @Column(name = "EXPANSION_STATUS", nullable = false, length = MAX_EXPANSION_STATUS_LENGTH) - private TermValueSetExpansionStatusEnum myExpansionStatus; + private TermValueSetPreExpansionStatusEnum myExpansionStatus; public TermValueSet() { super(); - myExpansionStatus = TermValueSetExpansionStatusEnum.NOT_EXPANDED; + myExpansionStatus = TermValueSetPreExpansionStatusEnum.NOT_EXPANDED; } public Long getId() { @@ -120,11 +120,11 @@ public class TermValueSet implements Serializable { return myConcepts; } - public TermValueSetExpansionStatusEnum getExpansionStatus() { + public TermValueSetPreExpansionStatusEnum getExpansionStatus() { return myExpansionStatus; } - public void setExpansionStatus(TermValueSetExpansionStatusEnum theExpansionStatus) { + public void setExpansionStatus(TermValueSetPreExpansionStatusEnum theExpansionStatus) { myExpansionStatus = theExpansionStatus; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java index b1ab2cff713..e3aa0c5196a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java @@ -52,6 +52,16 @@ public class TermValueSetConceptDesignation implements Serializable { @JoinColumn(name = "VALUESET_CONCEPT_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_CONCEPT_PID")) private TermValueSetConcept myConcept; + @ManyToOne() + @JoinColumn(name = "VALUESET_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VSCD_VS_PID")) + private TermValueSet myValueSet; + + @Transient + private String myValueSetUrl; + + @Transient + private String myValueSetName; + @Column(name = "LANG", nullable = true, length = MAX_LENGTH) private String myLanguage; @@ -80,6 +90,31 @@ public class TermValueSetConceptDesignation implements Serializable { return this; } + public TermValueSet getValueSet() { + return myValueSet; + } + + public TermValueSetConceptDesignation setValueSet(TermValueSet theValueSet) { + myValueSet = theValueSet; + return this; + } + + public String getValueSetUrl() { + if (myValueSetUrl == null) { + myValueSetUrl = getValueSet().getUrl(); + } + + return myValueSetUrl; + } + + public String getValueSetName() { + if (myValueSetName == null) { + myValueSetName = getValueSet().getName(); + } + + return myValueSetName; + } + public String getLanguage() { return myLanguage; } @@ -167,6 +202,9 @@ public class TermValueSetConceptDesignation implements Serializable { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("myId", myId) 
.append(myConcept != null ? ("myConcept - id=" + myConcept.getId()) : ("myConcept=(null)")) + .append(myValueSet != null ? ("myValueSet - id=" + myValueSet.getId()) : ("myValueSet=(null)")) + .append("myValueSetUrl", this.getValueSetUrl()) + .append("myValueSetName", this.getValueSetName()) .append("myLanguage", myLanguage) .append("myUseSystem", myUseSystem) .append("myUseCode", myUseCode) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java deleted file mode 100644 index 33cb4301097..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java +++ /dev/null @@ -1,42 +0,0 @@ -package ca.uhn.fhir.jpa.entity; - -/* - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * This enum is used to indicate the expansion status of a given ValueSet in the terminology tables. In this context, - * an expanded ValueSet has its included concepts stored in the terminology tables as well. - */ -public enum TermValueSetExpansionStatusEnum { - - /** - * This status indicates the ValueSet is waiting to be picked up and expanded by a scheduled task. - */ - NOT_EXPANDED, - /** - * This status indicates the ValueSet has been picked up by a scheduled task and is mid-expansion. - */ - EXPANSION_IN_PROGRESS, - /** - * This status indicates the ValueSet has been picked up by a scheduled task and expansion is complete. - */ - EXPANDED - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java new file mode 100644 index 00000000000..83e1e0af62e --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java @@ -0,0 +1,71 @@ +package ca.uhn.fhir.jpa.entity; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * This enum is used to indicate the pre-expansion status of a given ValueSet in the terminology tables. 
In this context, + * an expanded ValueSet has its included concepts stored in the terminology tables as well. + */ +public enum TermValueSetPreExpansionStatusEnum { + /** + * Sorting agnostic. + */ + + NOT_EXPANDED("notExpanded"), + EXPANSION_IN_PROGRESS("expansionInProgress"), + EXPANDED("expanded"), + FAILED_TO_EXPAND("failedToExpand"); + + private static Map ourValues; + private String myCode; + + TermValueSetPreExpansionStatusEnum(String theCode) { + myCode = theCode; + } + + public String getCode() { + return myCode; + } + + public static TermValueSetPreExpansionStatusEnum fromCode(String theCode) { + if (ourValues == null) { + HashMap values = new HashMap(); + for (TermValueSetPreExpansionStatusEnum next : values()) { + values.put(next.getCode(), next); + } + ourValues = Collections.unmodifiableMap(values); + } + return ourValues.get(theCode); + } + + /** + * Convert from Enum ordinal to Enum type. + * + * Usage: + * + * TermValueSetExpansionStatusEnum termValueSetExpansionStatusEnum = TermValueSetExpansionStatusEnum.values[ordinal]; + */ + public static final TermValueSetPreExpansionStatusEnum values[] = values(); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java index e0ce7b36245..464adce1f57 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java @@ -1,9 +1,10 @@ package ca.uhn.fhir.jpa.provider; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.util.ExpungeOptions; import ca.uhn.fhir.jpa.util.ExpungeOutcome; -import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -12,6 +13,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.IntegerType; import org.hl7.fhir.r4.model.Parameters; import org.jboss.logging.MDC; +import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletRequest; import java.util.Date; @@ -42,6 +44,10 @@ import java.util.TreeSet; public class BaseJpaProvider { public static final String REMOTE_ADDR = "req.remoteAddr"; public static final String REMOTE_UA = "req.userAgent"; + + @Autowired + protected DaoConfig myDaoConfig; + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaProvider.class); private FhirContext myContext; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java index e55de8b4a51..884941f053c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java @@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDstu3 { + private static final org.slf4j.Logger ourLog = 
org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetDstu3.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -46,6 +47,8 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst @OperationParam(name = "url", min = 0, max = 1) UriType theUrl, @OperationParam(name = "identifier", min = 0, max = 1) UriType theIdentifier, @OperationParam(name = "filter", min = 0, max = 1) StringType theFilter, + @OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset, + @OperationParam(name = "count", min = 0, max = 1) IntegerType theCount, RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); @@ -55,27 +58,59 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst } boolean haveIdentifier = url != null && isNotBlank(url.getValue()); - boolean haveValueSet = theValueSet != null && theValueSet.isEmpty() == false; + boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty(); if (!haveId && !haveIdentifier && !haveValueSet) { - throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request"); + throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request."); } if (moreThanOneTrue(haveId, haveIdentifier, haveValueSet)) { throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options."); } + int offset = myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(); + if (theOffset != null && theOffset.hasValue()) { + if (theOffset.getValue() >= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. 
count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java index e13582a995e..544670c2a5b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java @@ -62,7 +62,7 @@ public class BaseJpaResourceProviderConceptMapR4 extends JpaResourceProviderR4 { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR4.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. 
count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java index 65522e01231..f05a8f4cdaa 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java @@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5 { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR5.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. 
count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index c574f84a122..9ce1362b4fd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -351,7 +351,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, if (optionalExistingTermConceptMapById.isPresent()) { TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get(); - ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId()); + ourLog.info("Deleting existing TermConceptMap[{}] and its children...", existingTermConceptMap.getId()); for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) { for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) { @@ -368,7 +368,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId()); - ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId()); + ourLog.info("Done deleting existing TermConceptMap[{}] and its children.", existingTermConceptMap.getId()); ourLog.info("Flushing..."); myConceptMapGroupElementTargetDao.flush(); @@ -392,11 +392,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, if (optionalExistingTermValueSetById.isPresent()) { TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get(); - ourLog.info("Deleting existing TermValueSet {} and its children...", existingTermValueSet.getId()); + ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId()); myValueSetConceptDesignationDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetConceptDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetDao.deleteByTermValueSetId(existingTermValueSet.getId()); - 
ourLog.info("Done deleting existing TermValueSet {} and its children.", existingTermValueSet.getId()); + ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId()); ourLog.info("Flushing..."); myValueSetConceptDesignationDao.flush(); @@ -420,7 +420,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, count = 0; while (true) { Slice link = theLoader.get(); - if (link.hasContent() == false) { + if (!link.hasContent()) { break; } @@ -478,28 +478,221 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return valueSet; } + @Override + @Transactional(propagation = Propagation.REQUIRED) + public ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount) { + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null"); + + Optional optionalTermValueSet; + if (theValueSetToExpand.hasId()) { + optionalTermValueSet = myValueSetDao.findByResourcePid(theValueSetToExpand.getIdElement().getIdPartAsLong()); + } else if (theValueSetToExpand.hasUrl()) { + optionalTermValueSet = myValueSetDao.findByUrl(theValueSetToExpand.getUrl()); + } else { + throw new UnprocessableEntityException("ValueSet to be expanded must provide either ValueSet.id or ValueSet.url"); + } + + if (!optionalTermValueSet.isPresent()) { + throw new InvalidRequestException("ValueSet is not present in terminology tables: " + theValueSetToExpand.getUrl()); + } + + TermValueSet termValueSet = optionalTermValueSet.get(); + + validatePreExpansionStatusOfValueSetOrThrowException(termValueSet.getExpansionStatus()); + + ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent(); + expansionComponent.setIdentifier(UUID.randomUUID().toString()); + expansionComponent.setTimestamp(new Date()); + + populateExpansionComponent(expansionComponent, termValueSet, theOffset, theCount); + + ValueSet valueSet = new ValueSet(); + valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE); + valueSet.setCompose(theValueSetToExpand.getCompose()); + valueSet.setExpansion(expansionComponent); + return valueSet; + } + + private void validatePreExpansionStatusOfValueSetOrThrowException(TermValueSetPreExpansionStatusEnum thePreExpansionStatus) { + if (TermValueSetPreExpansionStatusEnum.EXPANDED != thePreExpansionStatus) { + String statusMsg = myContext.getLocalizer().getMessage( + TermValueSetPreExpansionStatusEnum.class, + thePreExpansionStatus.getCode()); + String msg = myContext.getLocalizer().getMessage( + BaseHapiTerminologySvcImpl.class, + "valueSetNotReadyForExpand", + thePreExpansionStatus.name(), + statusMsg); + throw new UnprocessableEntityException(msg); + } + } + + private void populateExpansionComponent(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) { + int total = myValueSetConceptDao.countByTermValueSetId(theTermValueSet.getId()); + theExpansionComponent.setTotal(total); + theExpansionComponent.setOffset(theOffset); + theExpansionComponent.addParameter().setName("offset").setValue(new IntegerType(theOffset)); + theExpansionComponent.addParameter().setName("count").setValue(new IntegerType(theCount)); + + if (theCount == 0 || total == 0) { + return; + } + + expandConcepts(theExpansionComponent, theTermValueSet, theOffset, theCount); + } + + private void expandConcepts(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, 
int theCount) { + int conceptsExpanded = 0; + for (int i = theOffset; i < (theOffset + theCount); i++) { + final int page = i; + Supplier> loader = () -> myValueSetConceptDao.findByTermValueSetId(PageRequest.of(page, 1), theTermValueSet.getId()); + + Slice slice = loader.get(); + if (!slice.hasContent()) { + break; + } + + for (TermValueSetConcept concept : slice.getContent()) { + ValueSet.ValueSetExpansionContainsComponent containsComponent = theExpansionComponent.addContains(); + containsComponent.setSystem(concept.getSystem()); + containsComponent.setCode(concept.getCode()); + containsComponent.setDisplay(concept.getDisplay()); + + // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to make this optional. + expandDesignations(theTermValueSet, concept, containsComponent); + + if (++conceptsExpanded % 250 == 0) { + ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl()); + } + } + + if (!slice.hasNext()) { + break; + } + } + + if (conceptsExpanded > 0) { + ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl()); + } + } + + private void expandDesignations(TermValueSet theValueSet, TermValueSetConcept theConcept, ValueSet.ValueSetExpansionContainsComponent theContainsComponent) { + int designationsExpanded = 0; + int index = 0; + while (true) { + final int page = index++; + Supplier> loader = () -> myValueSetConceptDesignationDao.findByTermValueSetConceptId(PageRequest.of(page, 1000), theConcept.getId()); + + Slice slice = loader.get(); + if (!slice.hasContent()) { + break; + } + + for (TermValueSetConceptDesignation designation : slice.getContent()) { + ValueSet.ConceptReferenceDesignationComponent designationComponent = theContainsComponent.addDesignation(); + designationComponent.setLanguage(designation.getLanguage()); + designationComponent.setUse(new Coding( + designation.getUseSystem(), + designation.getUseCode(), + designation.getUseDisplay())); + designationComponent.setValue(designation.getValue()); + + if (++designationsExpanded % 250 == 0) { + ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl()); + } + } + + if (!slice.hasNext()) { + break; + } + } + + if (designationsExpanded > 0) { + ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl()); + } + } + @Override @Transactional(propagation = Propagation.REQUIRED) public void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { expandValueSet(theValueSetToExpand, theValueSetCodeAccumulator, new AtomicInteger(0)); } + @SuppressWarnings("ConstantConditions") private void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, AtomicInteger theCodeCounter) { Set addedCodes = new HashSet<>(); + StopWatch sw = new StopWatch(); + String valueSetInfo = getValueSetInfo(theValueSetToExpand); + ourLog.info("Working with {}", valueSetInfo); + // Handle includes ourLog.debug("Handling includes"); for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { - boolean add = true; - expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter); + for (int i = 0; ; i++) { + int finalI = i; + Boolean shouldContinue = 
myTxTemplate.execute(t -> { + boolean add = true; + return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter, finalI); + }); + if (!shouldContinue) { + break; + } + } } // Handle excludes ourLog.debug("Handling excludes"); - for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) { - boolean add = false; - expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter); + for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) { + for (int i = 0; ; i++) { + int finalI = i; + Boolean shouldContinue = myTxTemplate.execute(t -> { + boolean add = false; + return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter, finalI); + }); + if (!shouldContinue) { + break; + } + } } + + ourLog.info("Done working with {} in {}ms", valueSetInfo, sw.getMillis()); + } + + private String getValueSetInfo(ValueSet theValueSet) { + StringBuilder sb = new StringBuilder(); + boolean isIdentified = false; + sb + .append("ValueSet:"); + if (theValueSet.hasId()) { + isIdentified = true; + sb + .append(" ValueSet.id[") + .append(theValueSet.getId()) + .append("]"); + } + if (theValueSet.hasUrl()) { + isIdentified = true; + sb + .append(" ValueSet.url[") + .append(theValueSet.getUrl()) + .append("]"); + } + if (theValueSet.hasIdentifier()) { + isIdentified = true; + sb + .append(" ValueSet.identifier[") + .append(theValueSet.getIdentifierFirstRep().getSystem()) + .append("|") + .append(theValueSet.getIdentifierFirstRep().getValue()) + .append("]"); + } + + if (!isIdentified) { + sb.append(" None of ValueSet.id, ValueSet.url, and ValueSet.identifier are provided."); + } + + return sb.toString(); } protected List expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) { @@ -513,16 +706,21 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return retVal; } - private void expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter) { + /** + * @return Returns true if there are potentially more results to process. + */ + private Boolean expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter, int theQueryIndex) { + String system = theInclude.getSystem(); boolean hasSystem = isNotBlank(system); boolean hasValueSet = theInclude.getValueSet().size() > 0; if (hasSystem) { - ourLog.info("Starting {} expansion around code system: {}", (theAdd ? "inclusion" : "exclusion"), system); + ourLog.info("Starting {} expansion around CodeSystem: {}", (theAdd ? 
"inclusion" : "exclusion"), system); TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system); if (cs != null) { + TermCodeSystemVersion csv = cs.getCurrentVersion(); FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); @@ -532,7 +730,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ if (myFulltextSearchSvc == null) { expandWithoutHibernateSearch(theValueSetCodeAccumulator, theAddedCodes, theInclude, system, theAdd, theCodeCounter); - return; + return false; } /* @@ -592,10 +790,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, String value = nextFilter.getValue(); if (value.endsWith("$")) { value = value.substring(0, value.length() - 1); - } else if (value.endsWith(".*") == false) { + } else if (!value.endsWith(".*")) { value = value + ".*"; } - if (value.startsWith("^") == false && value.startsWith(".*") == false) { + if (!value.startsWith("^") && !value.startsWith(".*")) { value = ".*" + value; } else if (value.startsWith("^")) { value = value.substring(1); @@ -646,25 +844,43 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class); - int maxResult = 50000; - jpaQuery.setMaxResults(maxResult); + /* + * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. This might + * be due to the dark arts that is memory management. Will monitor but not do anything about this right now. + */ + BooleanQuery.setMaxClauseCount(10000); StopWatch sw = new StopWatch(); AtomicInteger count = new AtomicInteger(0); - for (Object next : jpaQuery.getResultList()) { + int maxResultsPerBatch = 10000; + jpaQuery.setMaxResults(maxResultsPerBatch); + jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch); + + ourLog.info("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch); + + StopWatch swForBatch = new StopWatch(); + AtomicInteger countForBatch = new AtomicInteger(0); + + List resultList = jpaQuery.getResultList(); + int resultsInBatch = resultList.size(); + int firstResult = jpaQuery.getFirstResult(); + for (Object next : resultList) { count.incrementAndGet(); + countForBatch.incrementAndGet(); TermConcept concept = (TermConcept) next; addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); } + ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis()); - if (maxResult == count.get()) { - throw new InternalErrorException("Expansion fragment produced too many (>= " + maxResult + ") results"); + if (resultsInBatch < maxResultsPerBatch) { + ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis()); + return false; + } else { + return true; } - ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? 
"inclusion" : "exclusion"), count, sw.getMillis()); - } else { // No codesystem matching the URL found in the database @@ -673,7 +889,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, throw new InvalidRequestException("Unknown code system: " + system); } - if (theInclude.getConcept().isEmpty() == false) { + if (!theInclude.getConcept().isEmpty()) { for (ValueSet.ConceptReferenceComponent next : theInclude.getConcept()) { String nextCode = next.getCode(); if (isNoneBlank(system, nextCode) && !theAddedCodes.contains(system + "|" + nextCode)) { @@ -693,10 +909,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, addConceptsToList(theValueSetCodeAccumulator, theAddedCodes, system, concept, theAdd); } + return false; } } else if (hasValueSet) { + for (CanonicalType nextValueSet : theInclude.getValueSet()) { - ourLog.info("Starting {} expansion around ValueSet URI: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString()); + ourLog.info("Starting {} expansion around ValueSet: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString()); List expanded = expandValueSet(nextValueSet.getValueAsString()); for (VersionIndependentConcept nextConcept : expanded) { @@ -715,9 +933,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } } + + return false; + } else { throw new InvalidRequestException("ValueSet contains " + (theAdd ? "include" : "exclude") + " criteria with no system defined"); } + + } private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd, AtomicInteger theCodeCounter) { @@ -781,7 +1004,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY); - return txTemplate.execute(t->{ + return txTemplate.execute(t -> { TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem); return myConceptDao.findByCodeSystemAndCode(csv, theCode); }); @@ -798,7 +1021,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, StopWatch stopwatch = new StopWatch(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); - if (concept.isPresent() == false) { + if (!concept.isPresent()) { return Collections.emptySet(); } @@ -829,7 +1052,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, Stopwatch stopwatch = Stopwatch.createStarted(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); - if (concept.isPresent() == false) { + if (!concept.isPresent()) { return Collections.emptySet(); } @@ -1014,8 +1237,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Override protected void doInTransactionWithoutResult(TransactionStatus theArg0) { int maxResult = 1000; - Page concepts = myConceptDao.findResourcesRequiringReindexing(new PageRequest(0, maxResult)); - if (concepts.hasContent() == false) { + Page concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult)); + if (!concepts.hasContent()) { if (myChildToParentPidCache != null) { ourLog.info("Clearing parent concept cache"); myNextReindexPass = System.currentTimeMillis() + DateUtils.MILLIS_PER_MINUTE; @@ -1076,7 +1299,7 @@ public abstract 
class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } else { return saveConcept(theConcept); } - + } /** @@ -1122,28 +1345,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Transactional(propagation = Propagation.NEVER) @Override public synchronized void saveDeferred() { - if (!myProcessDeferred) { + if (isProcessDeferredPaused()) { return; - } else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) { + } else if (isNoDeferredConceptsAndNoConceptLinksToSaveLater()) { processReindexing(); } TransactionTemplate tt = new TransactionTemplate(myTransactionMgr); tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); - if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) { + if (isDeferredConceptsOrConceptLinksToSaveLater()) { tt.execute(t -> { processDeferredConcepts(); return null; }); } - if (myDeferredValueSets.size() > 0) { + if (isDeferredValueSets()) { tt.execute(t -> { processDeferredValueSets(); return null; }); } - if (myDeferredConceptMaps.size() > 0) { + if (isDeferredConceptMaps()) { tt.execute(t -> { processDeferredConceptMaps(); return null; @@ -1152,6 +1375,42 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } + private boolean isProcessDeferredPaused() { + return !myProcessDeferred; + } + + private boolean isNoDeferredConceptsAndNoConceptLinksToSaveLater() { + return isNoDeferredConcepts() && isNoConceptLinksToSaveLater(); + } + + private boolean isDeferredConceptsOrConceptLinksToSaveLater() { + return isDeferredConcepts() || isConceptLinksToSaveLater(); + } + + private boolean isDeferredConcepts() { + return !myDeferredConcepts.isEmpty(); + } + + private boolean isNoDeferredConcepts() { + return myDeferredConcepts.isEmpty(); + } + + private boolean isConceptLinksToSaveLater() { + return !myConceptLinksToSaveLater.isEmpty(); + } + + private boolean isNoConceptLinksToSaveLater() { + return myConceptLinksToSaveLater.isEmpty(); + } + + private boolean isDeferredValueSets() { + return !myDeferredValueSets.isEmpty(); + } + + private boolean isDeferredConceptMaps() { + return !myDeferredConceptMaps.isEmpty(); + } + @Override public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException { myApplicationContext = theApplicationContext; @@ -1498,31 +1757,86 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Scheduled(fixedDelay = 600000) // 10 minutes. 
@Override public synchronized void preExpandValueSetToTerminologyTables() { - new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { - @Override - protected void doInTransactionWithoutResult(TransactionStatus theStatus) { + if (isNotSafeToPreExpandValueSets()) { + ourLog.info("Skipping scheduled pre-expansion of ValueSets while deferred entities are being loaded."); + return; + } + TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); + + while (true) { + TermValueSet valueSetToExpand = txTemplate.execute(t -> { Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); - if (optionalTermValueSet.isPresent()) { - TermValueSet termValueSet = optionalTermValueSet.get(); - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS); - myValueSetDao.saveAndFlush(termValueSet); - - ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource()); - - expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao)); - - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED); - myValueSetDao.saveAndFlush(termValueSet); + if (!optionalTermValueSet.isPresent()) { + return null; } + + TermValueSet termValueSet = optionalTermValueSet.get(); + termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); + return myValueSetDao.saveAndFlush(termValueSet); + }); + if (valueSetToExpand == null) { + return; } - }); + + // We have a ValueSet to pre-expand. + try { + ValueSet valueSet = txTemplate.execute(t -> { + TermValueSet refreshedValueSetToExpand = myValueSetDao.findById(valueSetToExpand.getId()).get(); + return getValueSetFromResourceTable(refreshedValueSetToExpand.getResource()); + }); + expandValueSet(valueSet, new ValueSetConceptAccumulator(valueSetToExpand, myValueSetConceptDao, myValueSetConceptDesignationDao)); + + // We are done with this ValueSet. 
+ txTemplate.execute(t -> { + valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); + myValueSetDao.saveAndFlush(valueSetToExpand); + return null; + }); + + } catch (Exception e) { + ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e); + txTemplate.execute(t -> { + valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND); + myValueSetDao.saveAndFlush(valueSetToExpand); + return null; + }); + } + } + } + + private boolean isNotSafeToPreExpandValueSets() { + return !isSafeToPreExpandValueSets(); + } + + private boolean isSafeToPreExpandValueSets() { + if (isProcessDeferredPaused()) { + return false; + } + + if (isDeferredConcepts()) { + return false; + } + + if (isConceptLinksToSaveLater()) { + return false; + } + + if (isDeferredValueSets()) { + return false; + } + + if (isDeferredConceptMaps()) { + return false; + } + + return true; } protected abstract ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable); private Optional getNextTermValueSetNotExpanded() { Optional retVal = Optional.empty(); - Page page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED); + Slice page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED); if (!page.getContent().isEmpty()) { retVal = Optional.of(page.getContent().get(0)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java index 498f25dfe94..a64bb9baccf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java @@ -92,6 +92,11 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl { throw new UnsupportedOperationException(); } + @Override + public IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount) { + throw new UnsupportedOperationException(); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { throw new UnsupportedOperationException(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java index da83782a810..4866ddd45e0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java @@ -177,6 +177,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen } } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + ValueSet valueSetToExpand = (ValueSet) theInput; + + try { + org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; + valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand); + org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount); + return VersionConvertor_30_40.convertValueSet(expandedR4); + } catch (FHIRException e) { + throw new InternalErrorException(e); + } + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { ValueSet 
valueSetToExpand = (ValueSet) theValueSetToExpand; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java index b764fcf5ee8..6ded7d0a469 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java @@ -137,6 +137,12 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements return super.expandValueSet(valueSetToExpand); } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + ValueSet valueSetToExpand = (ValueSet) theInput; + return super.expandValueSet(valueSetToExpand, theOffset, theCount); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java index 2f27d7b1761..c90099016f0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java @@ -143,6 +143,13 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + org.hl7.fhir.r4.model.ValueSet valueSetToExpand = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theInput); + org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount); + return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { org.hl7.fhir.r4.model.ValueSet valueSetToExpand = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSetToExpand); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java index 5faf0f72320..195a4192c3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java @@ -44,6 +44,8 @@ public interface IHapiTerminologySvc { ValueSet expandValueSet(ValueSet theValueSetToExpand); + ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount); + void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); /** @@ -51,6 +53,11 @@ public interface IHapiTerminologySvc { */ IBaseResource expandValueSet(IBaseResource theValueSetToExpand); + /** + * Version independent + */ + IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount); + void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); List expandValueSet(String theValueSet); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java
index 4e2e5b92ec2..9c3f6a0a3f3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java
@@ -71,7 +71,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 		}
 
 		// Get existing entity so it can be deleted.
-		Optional<TermValueSetConcept> optionalConcept = myValueSetConceptDao.findByValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode);
+		Optional<TermValueSetConcept> optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode);
 
 		if (optionalConcept.isPresent()) {
 			TermValueSetConcept concept = optionalConcept.get();
@@ -103,9 +103,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 		}
 
 		myValueSetConceptDao.save(concept);
-		if (myConceptsSaved++ % 250 == 0) {
+		if (myConceptsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it!
 			ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl());
-			myValueSetConceptDao.flush();
 		}
 
 		return concept;
@@ -116,6 +115,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 		TermValueSetConceptDesignation designation = new TermValueSetConceptDesignation();
 		designation.setConcept(theConcept);
+		designation.setValueSet(myTermValueSet);
 		designation.setLanguage(theDesignation.getLanguage());
 		if (isNoneBlank(theDesignation.getUseSystem(), theDesignation.getUseCode())) {
 			designation.setUseSystem(theDesignation.getUseSystem());
@@ -127,9 +127,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 		designation.setValue(theDesignation.getValue());
 
 		myValueSetConceptDesignationDao.save(designation);
-		if (myDesignationsSaved++ % 250 == 0) {
-			ourLog.info("Have pre-expanded {} designations in ValueSet[{}]", myDesignationsSaved, myTermValueSet.getUrl());
-			myValueSetConceptDesignationDao.flush();
+		if (myDesignationsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it!
+ ourLog.info("Have pre-expanded {} designations for Concept[{}|{}] in ValueSet[{}]", myDesignationsSaved, theConcept.getSystem(), theConcept.getCode(), myTermValueSet.getUrl()); } return designation; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java index 5aa95013365..3104b452c6e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java @@ -22,15 +22,23 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import ca.uhn.fhir.model.api.annotation.Block; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.r4.model.ValueSet; import java.util.Collection; @Block() public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent implements IValueSetConceptAccumulator { + private final int myMaxResults = 50000; + private int myConceptsCount; + + public ValueSetExpansionComponentWithConceptAccumulator() { + myConceptsCount = 0; + } @Override public void includeConcept(String theSystem, String theCode, String theDisplay) { + incrementConceptsCount(); ValueSet.ValueSetExpansionContainsComponent contains = this.addContains(); contains.setSystem(theSystem); contains.setCode(theCode); @@ -39,6 +47,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V @Override public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations) { + incrementConceptsCount(); ValueSet.ValueSetExpansionContainsComponent contains = this.addContains(); contains.setSystem(theSystem); contains.setCode(theCode); @@ -65,4 +74,10 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V theSystem.equals(t.getSystem()) && theCode.equals(t.getCode())); } + + private void incrementConceptsCount() { + if (++myConceptsCount > myMaxResults) { + throw new InternalErrorException("Expansion produced too many (>= " + myMaxResults + ") results"); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java new file mode 100644 index 00000000000..a9b8f365ca4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java @@ -0,0 +1,21 @@ +package ca.uhn.fhir.jpa.entity; + +import ca.uhn.fhir.i18n.HapiLocalizer; +import org.junit.Test; + +import static org.junit.Assert.fail; + +public class TermValueSetPreExpansionStatusEnumTest { + @Test + public void testHaveDescriptions() { + HapiLocalizer localizer = new HapiLocalizer(); + + for (TermValueSetPreExpansionStatusEnum next : TermValueSetPreExpansionStatusEnum.values()) { + String key = "ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum." 
+ next.getCode(); + String msg = localizer.getMessage(key); + if (msg.equals(HapiLocalizer.UNKNOWN_I18N_KEY_MESSAGE)) { + fail("No value for key: " + key); + } + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java index af7fcd14fac..c789f0a82b9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java @@ -397,7 +397,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 @Test public void testExpandInvalidParams() throws IOException { - //@formatter:off try { ourClient .operation() @@ -407,11 +406,9 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 .execute(); fail(); } catch (InvalidRequestException e) { - assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request", e.getMessage()); + assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml"); ourClient @@ -425,9 +422,7 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml"); ourClient @@ -441,8 +436,30 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "offset", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. offset: -1", e.getMessage()); + } + + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "count", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. 
count: -1", e.getMessage()); + } } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java index a6e1011f511..7a69fd83b55 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java @@ -264,7 +264,6 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { @Test public void testExpandInvalidParams() throws IOException { - //@formatter:off try { ourClient .operation() @@ -274,11 +273,9 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { .execute(); fail(); } catch (InvalidRequestException e) { - assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request", e.getMessage()); + assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case-r4.xml"); ourClient @@ -292,9 +289,7 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case.xml"); ourClient @@ -308,8 +303,30 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "offset", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. offset: -1", e.getMessage()); + } + + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "count", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. 
count: -1", e.getMessage()); + } } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index fef85b39cc8..22a42682d9e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -562,6 +562,40 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { } + @Test + public void testDeleteValueSet() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental()); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + Long termValueSetId = myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).get().getId(); + assertEquals(3, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue()); + assertEquals(24, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue()); + + new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { + @Override + protected void doInTransactionWithoutResult(TransactionStatus theStatus) { + myTermValueSetConceptDesignationDao.deleteByTermValueSetId(termValueSetId); + assertEquals(0, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue()); + myTermValueSetConceptDao.deleteByTermValueSetId(termValueSetId); + assertEquals(0, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue()); + myTermValueSetDao.deleteByTermValueSetId(termValueSetId); + assertFalse(myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).isPresent()); + } + }); + } + @Test public void testDuplicateCodeSystemUrls() throws Exception { loadAndPersistCodeSystem(); @@ -572,6 +606,14 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { loadAndPersistCodeSystem(); } + @Test + public void testTest() { + ourLog.info("as is: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); + ourLog.info("toString: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.toString()); + ourLog.info("name: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.name()); + ourLog.info("getCode: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.getCode()); + } + @Test public void testDuplicateConceptMapUrls() { createAndPersistConceptMap(); @@ -595,6 +637,294 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { loadAndPersistValueSet(); } + @Test + public void testExpandTermValueSetAndChildren() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + 
CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental()); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); + + ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8450-9", containsComponent.getCode()); + assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); + assertEquals(2, containsComponent.getDesignation().size()); + + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + + designationComponent = containsComponent.getDesignation().get(1); + assertEquals("sv", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + // ... 
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(22);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8491-3", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
+    assertEquals(1, containsComponent.getDesignation().size());
+
+    designationComponent = containsComponent.getDesignation().get(0);
+    assertEquals("nl", designationComponent.getLanguage());
+    assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
+    assertEquals("900000000000013009", designationComponent.getUse().getCode());
+    assertEquals("Synonym", designationComponent.getUse().getDisplay());
+    assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(23);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8492-1", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+  }
+
+  @Test
+  public void testExpandTermValueSetAndChildrenWithCount() throws Exception {
+    myDaoConfig.setPreExpandValueSetsExperimental(true);
+
+    loadAndPersistCodeSystemAndValueSetWithDesignations();
+
+    CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
+    ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));
+
+    ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
+    ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));
+
+    myTermSvc.preExpandValueSetToTerminologyTables();
+
+    ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 23);
+    ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));
+
+    assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
+    assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset());
+    assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
+    assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
+    assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
+    assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
+    assertEquals(23, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());
+
+    assertEquals(23, expandedValueSet.getExpansion().getContains().size());
+
+    ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8450-9", containsComponent.getCode());
+    assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay());
+    assertEquals(2, containsComponent.getDesignation().size());
+
+    ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
+    assertEquals("nl", designationComponent.getLanguage());
+    assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
+    assertEquals("900000000000013009", designationComponent.getUse().getCode());
assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + + designationComponent = containsComponent.getDesignation().get(1); + assertEquals("sv", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + // ... + + containsComponent = expandedValueSet.getExpansion().getContains().get(22); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8491-3", containsComponent.getCode()); + assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); + assertEquals(1, containsComponent.getDesignation().size()); + + designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithCountOfZero() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 0); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + + assertFalse(expandedValueSet.getExpansion().hasContains()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithOffset() throws Exception { + 
+    myDaoConfig.setPreExpandValueSetsExperimental(true);
+
+    loadAndPersistCodeSystemAndValueSetWithDesignations();
+
+    CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
+    ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));
+
+    ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
+    ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));
+
+    myTermSvc.preExpandValueSetToTerminologyTables();
+
+    ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, myDaoConfig.getPreExpandValueSetsDefaultCountExperimental());
+    ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));
+
+    assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
+    assertEquals(1, expandedValueSet.getExpansion().getOffset());
+    assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
+    assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
+    assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
+    assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
+    assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());
+
+    assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size());
+
+    ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("11378-7", containsComponent.getCode());
+    assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(1);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8493-9", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+
+    // ...
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(21);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8491-3", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
+    assertEquals(1, containsComponent.getDesignation().size());
+
+    ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
+    assertEquals("nl", designationComponent.getLanguage());
+    assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
+    assertEquals("900000000000013009", designationComponent.getUse().getCode());
+    assertEquals("Synonym", designationComponent.getUse().getDisplay());
+    assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(22);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8492-1", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+  }
+
+  @Test
+  public void testExpandTermValueSetAndChildrenWithOffsetAndCount() throws Exception {
+    myDaoConfig.setPreExpandValueSetsExperimental(true);
+
+    loadAndPersistCodeSystemAndValueSetWithDesignations();
+
+    CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId);
+    ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));
+
+    ValueSet valueSet = myValueSetDao.read(myExtensionalVsId);
+    ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));
+
+    myTermSvc.preExpandValueSetToTerminologyTables();
+
+    ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, 22);
+    ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet));
+
+    assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal());
+    assertEquals(1, expandedValueSet.getExpansion().getOffset());
+    assertEquals(2, expandedValueSet.getExpansion().getParameter().size());
+    assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName());
+    assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue());
+    assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName());
+    assertEquals(22, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue());
+
+    assertEquals(22, expandedValueSet.getExpansion().getContains().size());
+
+    ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("11378-7", containsComponent.getCode());
+    assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(1);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8493-9", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay());
+    assertFalse(containsComponent.hasDesignation());
+
+    // ...
+
+    containsComponent = expandedValueSet.getExpansion().getContains().get(21);
+    assertEquals("http://acme.org", containsComponent.getSystem());
+    assertEquals("8491-3", containsComponent.getCode());
+    assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay());
+    assertEquals(1, containsComponent.getDesignation().size());
+
+    ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0);
+    assertEquals("nl", designationComponent.getLanguage());
+    assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem());
+    assertEquals("900000000000013009", designationComponent.getUse().getCode());
+    assertEquals("Synonym", designationComponent.getUse().getDisplay());
+    assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue());
+  }
+
   @Test
   public void testExpandValueSetWithValueSetCodeAccumulator() {
     createCodeSystem();
@@ -607,17 +937,6 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection());
   }
 
-  @Test
-  public void testValidateCode() {
-    createCodeSystem();
-
-    IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null);
-    assertEquals(true, validation.isOk());
-
-    validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null);
-    assertEquals(false, validation.isOk());
-  }
-
   @Test
   public void testStoreTermCodeSystemAndChildren() throws Exception {
     loadAndPersistCodeSystemWithDesignations();
@@ -985,7 +1304,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
     assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
     assertEquals(0, termValueSet.getConcepts().size());
-    assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
+    assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
   });
 
   myTermSvc.preExpandValueSetToTerminologyTables();
@@ -1003,7 +1322,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
     assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
     assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size());
-    assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
+    assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
 
     TermValueSetConcept concept = termValueSet.getConcepts().get(0);
     ourLog.info("Code:\n" + concept.toString());
@@ -1083,7 +1402,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
     assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
     assertEquals(0, termValueSet.getConcepts().size());
-    assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
+    assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
   });
 
   myTermSvc.preExpandValueSetToTerminologyTables();
@@ -1101,7 +1420,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
     assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
     assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size());
-    assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
+    assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
 
     TermValueSetConcept concept = termValueSet.getConcepts().get(0);
     ourLog.info("Code:\n" + concept.toString());
@@ -2291,6 +2610,17 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
     });
   }
 
+  @Test
+  public void testValidateCode() {
+    createCodeSystem();
+
+    IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null);
+    assertEquals(true, validation.isOk());
+
+    validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null);
+    assertEquals(false, validation.isOk());
+  }
+
   @AfterClass
   public static void afterClassClearContext() {
     TestUtil.clearAllStaticFieldsForUnitTest();
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 8dae8204771..c8e9278f13a 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -79,6 +79,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
     resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
     resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");
 
+    // TermValueSetConceptDesignation
+    version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");
+    Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION");
+    termValueSetConceptDesignationTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
+    termValueSetConceptDesignationTable
+      .addForeignKey("FK_TRM_VSCD_VS_PID")
+      .toColumn("VALUESET_PID")
+      .references("TRM_VALUESET", "PID");
   }
 
   protected void init400() {
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index fc505a69c21..f91b8a758aa 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -73,6 +73,11 @@
     The informational message returned in an OperationOutcome when a delete failed
     due to cascades not being enabled contained an incorrect example. This has
     been corrected.
 
+
+    The HAPI FHIR CLI server now uses H2 as its database platform instead of Derby.
+    Note that this means that data in any existing installations will need to be
+    re-uploaded to the new database platform.
+