Mirror of https://github.com/hapifhir/hapi-fhir.git, synced 2025-02-22 13:55:22 +00:00
Incremental work on large ValueSet expansion support; transaction boundaries need to be broken up.
This commit is contained in:
parent 97a235ddfc
commit 890555a77d
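The message above names the core constraint: a single ValueSet can expand to a very large number of concepts, so the expansion is paged through the query in fixed-size batches, and the stated follow-up work is to commit each batch in its own transaction rather than holding one transaction open across the whole expansion. A minimal sketch of that target shape, combining the setFirstResult/setMaxResults paging introduced in this commit with the PROPAGATION_REQUIRES_NEW template the service already uses for deferred saves (the class and the processBatch helper are illustrative, not part of the commit):

import java.util.List;
import javax.persistence.Query;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

class BatchedExpansionSketch {

   private final PlatformTransactionManager myTxManager;

   BatchedExpansionSketch(PlatformTransactionManager theTxManager) {
      myTxManager = theTxManager;
   }

   void expandInBatches(Query theQuery) {
      TransactionTemplate tt = new TransactionTemplate(myTxManager);
      // Each batch gets its own transaction instead of one transaction
      // spanning the entire expansion.
      tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

      int maxResultsPerBatch = 10000; // same batch size as the loop in this commit
      int firstResult = 0;
      while (true) {
         theQuery.setFirstResult(firstResult);
         theQuery.setMaxResults(maxResultsPerBatch);
         List<?> batch = theQuery.getResultList();

         tt.execute(t -> {
            processBatch(batch); // hypothetical per-batch persistence work
            return null;
         });

         if (batch.size() < maxResultsPerBatch) {
            break; // a short page means the result set is drained
         }
         firstResult += maxResultsPerBatch;
      }
   }

   private void processBatch(List<?> theBatch) {
      // placeholder: save/accumulate the concepts in this page
   }
}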
@@ -17,8 +17,8 @@ ca.uhn.fhir.rest.client.impl.GenericClient.noPagingLinkFoundInBundle=Can not per
ca.uhn.fhir.rest.client.impl.GenericClient.noVersionIdForVread=No version specified in URL for 'vread' operation: {0}
ca.uhn.fhir.rest.client.impl.GenericClient.incompleteUriForRead=The given URI is not an absolute URL and is not usable for this operation: {0}
ca.uhn.fhir.rest.client.impl.GenericClient.cannotDetermineResourceTypeFromUri=Unable to determine the resource type from the given URI: {0}
ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. URL used was {0}
ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext).

ca.uhn.fhir.rest.server.interceptor.auth.RuleImplOp.invalidRequestBundleTypeForTransaction=Invalid request Bundle.type value for transaction: {0}

@@ -46,14 +46,14 @@ ca.uhn.fhir.rest.param.BaseParam.chainNotSupported=Invalid search parameter "{0}

ca.uhn.fhir.rest.server.method.ResourceParameter.invalidContentTypeInRequest=Incorrect Content-Type header value of "{0}" was provided in the request. A FHIR Content-Type is required for "{1}" operation
ca.uhn.fhir.rest.server.method.ResourceParameter.noContentTypeInRequest=No Content-Type header was provided in the request. This is required for "{0}" operation
ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. Error was: {1}

ca.uhn.fhir.parser.ParserState.wrongResourceTypeFound=Incorrect resource type found, expected "{0}" but found "{1}"
ca.uhn.fhir.rest.server.RestfulServer.getPagesNonHttpGet=Requests for _getpages must use HTTP GET
ca.uhn.fhir.rest.server.RestfulServer.unknownMethod=Invalid request: The FHIR endpoint on this server does not know how to handle {0} operation[{1}] with parameters [{2}]
ca.uhn.fhir.rest.server.RestfulServer.rootRequest=This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name.
ca.uhn.fhir.rest.server.RestfulServer.rootRequest.multitenant=This is the base URL of a multitenant FHIR server. Unable to handle this request, as it does not contain a tenant ID.
ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body?
ca.uhn.fhir.validation.FhirValidator.noPhWarningOnStartup=Ph-schematron library not found on classpath, will not attempt to perform schematron validation
ca.uhn.fhir.validation.FhirValidator.noPhError=Ph-schematron library not found on classpath, can not enable perform schematron validation
ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected during validation
@@ -62,22 +62,22 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri


ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.

ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate index matching query: {1} (existing index belongs to {2})

ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0}
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}

ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists
@@ -97,8 +97,8 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully delet
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}". Value search parameters for this search are: {1}

ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0}
ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation
ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request

ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references
ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1}
@@ -118,8 +118,8 @@ ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascadi

ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.noAttachmentDataPresent=The resource with ID {0} has no data at path: {1}
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted.
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1}
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2}

@@ -127,5 +127,10 @@ ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemU
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
+ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand=ValueSet is not ready for operation $expand; current status: {0} | {1}

ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}

+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress.
+ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete.
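For context on how the keys added above are consumed: HAPI resolves them through the FhirContext's localizer, either by class plus key suffix or by fully qualified key. A minimal sketch of the lookups the terminology service performs further down in this commit, assuming hapi-fhir-base and this message bundle are on the classpath (the standalone wiring is illustrative; the service uses its injected myContext instead):

import ca.uhn.fhir.context.FhirContext;

public class LocalizerLookupSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();

      // Resolves ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded
      String statusMsg = ctx.getLocalizer().getMessage(
         "ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded");

      // Resolves ...BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand,
      // filling the {0} and {1} placeholders
      String msg = ctx.getLocalizer().getMessage(
         "ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand",
         "NOT_EXPANDED", statusMsg);

      System.out.println(msg);
   }
}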
@@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.data;
 */

import ca.uhn.fhir.jpa.entity.TermValueSet;
-import ca.uhn.fhir.jpa.entity.TermValueSetExpansionStatusEnum;
+import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
@@ -44,6 +44,6 @@ public interface ITermValueSetDao extends JpaRepository<TermValueSet, Long> {
   Optional<TermValueSet> findByUrl(@Param("url") String theUrl);

   @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status")
-   Slice<TermValueSet> findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus);
+   Slice<TermValueSet> findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus);

}
@@ -71,11 +71,11 @@ public class TermValueSet implements Serializable {

   @Enumerated(EnumType.STRING)
   @Column(name = "EXPANSION_STATUS", nullable = false, length = MAX_EXPANSION_STATUS_LENGTH)
-   private TermValueSetExpansionStatusEnum myExpansionStatus;
+   private TermValueSetPreExpansionStatusEnum myExpansionStatus;

   public TermValueSet() {
      super();
-      myExpansionStatus = TermValueSetExpansionStatusEnum.NOT_EXPANDED;
+      myExpansionStatus = TermValueSetPreExpansionStatusEnum.NOT_EXPANDED;
   }

   public Long getId() {
@@ -120,11 +120,11 @@ public class TermValueSet implements Serializable {
      return myConcepts;
   }

-   public TermValueSetExpansionStatusEnum getExpansionStatus() {
+   public TermValueSetPreExpansionStatusEnum getExpansionStatus() {
      return myExpansionStatus;
   }

-   public void setExpansionStatus(TermValueSetExpansionStatusEnum theExpansionStatus) {
+   public void setExpansionStatus(TermValueSetPreExpansionStatusEnum theExpansionStatus) {
      myExpansionStatus = theExpansionStatus;
   }

@@ -1,42 +0,0 @@
-package ca.uhn.fhir.jpa.entity;
-
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2019 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-/**
- * This enum is used to indicate the expansion status of a given ValueSet in the terminology tables. In this context,
- * an expanded ValueSet has its included concepts stored in the terminology tables as well.
- */
-public enum TermValueSetExpansionStatusEnum {
-
-   /**
-    * This status indicates the ValueSet is waiting to be picked up and expanded by a scheduled task.
-    */
-   NOT_EXPANDED,
-   /**
-    * This status indicates the ValueSet has been picked up by a scheduled task and is mid-expansion.
-    */
-   EXPANSION_IN_PROGRESS,
-   /**
-    * This status indicates the ValueSet has been picked up by a scheduled task and expansion is complete.
-    */
-   EXPANDED
-
-}
@@ -0,0 +1,69 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This enum is used to indicate the pre-expansion status of a given ValueSet in the terminology tables. In this context,
+ * an expanded ValueSet has its included concepts stored in the terminology tables as well.
+ */
+public enum TermValueSetPreExpansionStatusEnum {
+   /**
+    * Sorting agnostic.
+    */
+   NOT_EXPANDED("notExpanded"),
+   EXPANSION_IN_PROGRESS("expansionInProgress"),
+   EXPANDED("expanded");
+
+   private static Map<String, TermValueSetPreExpansionStatusEnum> ourValues;
+   private String myCode;
+
+   private TermValueSetPreExpansionStatusEnum(String theCode) {
+      myCode = theCode;
+   }
+
+   public String getCode() {
+      return myCode;
+   }
+
+   public static TermValueSetPreExpansionStatusEnum fromCode(String theCode) {
+      if (ourValues == null) {
+         HashMap<String, TermValueSetPreExpansionStatusEnum> values = new HashMap<String, TermValueSetPreExpansionStatusEnum>();
+         for (TermValueSetPreExpansionStatusEnum next : values()) {
+            values.put(next.getCode(), next);
+         }
+         ourValues = Collections.unmodifiableMap(values);
+      }
+      return ourValues.get(theCode);
+   }
+
+   /**
+    * Convert from Enum ordinal to Enum type.
+    *
+    * Usage:
+    *
+    * <code>TermValueSetPreExpansionStatusEnum termValueSetPreExpansionStatusEnum = TermValueSetPreExpansionStatusEnum.values[ordinal];</code>
+    */
+   public static final TermValueSetPreExpansionStatusEnum values[] = values();
+}
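For reference, the two lookup paths the new enum exposes, using only the code above:

// fromCode lazily builds an immutable code-to-constant map on first use
TermValueSetPreExpansionStatusEnum byCode =
   TermValueSetPreExpansionStatusEnum.fromCode("expansionInProgress"); // EXPANSION_IN_PROGRESS

// the public values[] field supports ordinal lookup without the defensive
// array copy that each call to values() performs
TermValueSetPreExpansionStatusEnum byOrdinal =
   TermValueSetPreExpansionStatusEnum.values[0]; // NOT_EXPANDED

The lazy initialization of ourValues is unsynchronized; that is benign here because rebuilding the map is idempotent and the published map is immutable.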
@@ -498,9 +498,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,

   TermValueSet termValueSet = optionalTermValueSet.get();

-   if (TermValueSetExpansionStatusEnum.EXPANDED != termValueSet.getExpansionStatus()) {
-      throw new UnprocessableEntityException("ValueSet is not ready for expansion; current status: " + termValueSet.getExpansionStatus());
-   }
+   validatePreExpansionStatusOfValueSetOrThrowException(termValueSet.getExpansionStatus());

   ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
   expansionComponent.setIdentifier(UUID.randomUUID().toString());
@@ -515,6 +513,20 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
      return valueSet;
   }

+   private void validatePreExpansionStatusOfValueSetOrThrowException(TermValueSetPreExpansionStatusEnum thePreExpansionStatus) {
+      if (TermValueSetPreExpansionStatusEnum.EXPANDED != thePreExpansionStatus) {
+         String statusMsg = myContext.getLocalizer().getMessage(
+            TermValueSetPreExpansionStatusEnum.class,
+            thePreExpansionStatus.getCode());
+         String msg = myContext.getLocalizer().getMessage(
+            BaseHapiTerminologySvcImpl.class,
+            "valueSetNotReadyForExpand",
+            thePreExpansionStatus.name(),
+            statusMsg);
+         throw new UnprocessableEntityException(msg);
+      }
+   }
+
   private void populateExpansionComponent(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) {
      int total = myValueSetConceptDao.countByTermValueSetId(theTermValueSet.getId());
      theExpansionComponent.setTotal(total);
@@ -815,11 +827,34 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
   StopWatch sw = new StopWatch();
   AtomicInteger count = new AtomicInteger(0);

-   for (Object next : jpaQuery.getResultList()) {
-      count.incrementAndGet();
-      TermConcept concept = (TermConcept) next;
-      addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter);
-   }
+   int maxResultsPerBatch = 10000;
+   jpaQuery.setMaxResults(maxResultsPerBatch);
+   jpaQuery.setFirstResult(0);
+
+   ourLog.info("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch);
+
+   do {
+      StopWatch swForBatch = new StopWatch();
+      AtomicInteger countForBatch = new AtomicInteger(0);
+
+      List resultList = jpaQuery.getResultList();
+      int resultsInBatch = jpaQuery.getResultSize();
+      int firstResult = jpaQuery.getFirstResult();
+      for (Object next : resultList) {
+         count.incrementAndGet();
+         countForBatch.incrementAndGet();
+         TermConcept concept = (TermConcept) next;
+         addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter);
+      }
+
+      ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis());
+
+      if (resultsInBatch < maxResultsPerBatch) {
+         break;
+      } else {
+         jpaQuery.setFirstResult(firstResult + maxResultsPerBatch);
+      }
+   } while (true);

   ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis());

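One behavior worth flagging in the new loop: getResultSize() is Hibernate Search FullTextQuery API (a plain JPA Query has no such method), and it reports the total number of hits for the query rather than the number of rows fetched for the current page. The resultsInBatch < maxResultsPerBatch exit test therefore fires only when the whole result set fits in a single batch. A conventional page-drain test keys off the fetched list instead; a minimal sketch of that variant, reusing the names from the loop above:

do {
   List resultList = jpaQuery.getResultList();
   // process the page here, then check the page size itself: a short
   // page reliably signals that the result set is exhausted
   if (resultList.size() < maxResultsPerBatch) {
      break;
   }
   jpaQuery.setFirstResult(jpaQuery.getFirstResult() + maxResultsPerBatch);
} while (true);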
@@ -1280,28 +1315,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
   @Transactional(propagation = Propagation.NEVER)
   @Override
   public synchronized void saveDeferred() {
-      if (!myProcessDeferred) {
+      if (isProcessDeferredPaused()) {
         return;
-      } else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) {
+      } else if (isNoDeferredConceptsAndNoConceptLinksToSaveLater()) {
         processReindexing();
      }

      TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
      tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-      if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
+      if (isDeferredConceptsOrConceptLinksToSaveLater()) {
         tt.execute(t -> {
            processDeferredConcepts();
            return null;
         });
      }

-      if (myDeferredValueSets.size() > 0) {
+      if (isDeferredValueSets()) {
         tt.execute(t -> {
            processDeferredValueSets();
            return null;
         });
      }
-      if (myDeferredConceptMaps.size() > 0) {
+      if (isDeferredConceptMaps()) {
         tt.execute(t -> {
            processDeferredConceptMaps();
            return null;
@@ -1310,6 +1345,42 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,

   }

+   private boolean isProcessDeferredPaused() {
+      return !myProcessDeferred;
+   }
+
+   private boolean isNoDeferredConceptsAndNoConceptLinksToSaveLater() {
+      return isNoDeferredConcepts() && isNoConceptLinksToSaveLater();
+   }
+
+   private boolean isDeferredConceptsOrConceptLinksToSaveLater() {
+      return isDeferredConcepts() || isConceptLinksToSaveLater();
+   }
+
+   private boolean isDeferredConcepts() {
+      return !myDeferredConcepts.isEmpty();
+   }
+
+   private boolean isNoDeferredConcepts() {
+      return myDeferredConcepts.isEmpty();
+   }
+
+   private boolean isConceptLinksToSaveLater() {
+      return !myConceptLinksToSaveLater.isEmpty();
+   }
+
+   private boolean isNoConceptLinksToSaveLater() {
+      return myConceptLinksToSaveLater.isEmpty();
+   }
+
+   private boolean isDeferredValueSets() {
+      return !myDeferredValueSets.isEmpty();
+   }
+
+   private boolean isDeferredConceptMaps() {
+      return !myDeferredConceptMaps.isEmpty();
+   }
+
   @Override
   public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
      myApplicationContext = theApplicationContext;
@@ -1657,6 +1728,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
   @Scheduled(fixedDelay = 60000) // FIXME: DM 2019-08-19 - Remove this!
   @Override
   public synchronized void preExpandValueSetToTerminologyTables() {
+      if (isNotSafeToPreExpandValueSets()) {
+         ourLog.info("Skipping scheduled pre-expansion of ValueSets while deferred entities are being loaded.");
+         return;
+      }
      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
         @Override
         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
@@ -1665,14 +1740,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
            Optional<TermValueSet> optionalTermValueSet = getNextTermValueSetNotExpanded();
            if (optionalTermValueSet.isPresent()) {
               TermValueSet termValueSet = optionalTermValueSet.get();
-               termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS);
+               termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS);
               myValueSetDao.saveAndFlush(termValueSet);

               ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource());

               expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao));

-               termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED);
+               termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED);
               myValueSetDao.saveAndFlush(termValueSet);
            } else {
               hasNextTermValueSetNotExpanded = false;
@@ -1682,11 +1757,39 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
      });
   }

+   private boolean isNotSafeToPreExpandValueSets() {
+      return !isSafeToPreExpandValueSets();
+   }
+
+   private boolean isSafeToPreExpandValueSets() {
+      if (isProcessDeferredPaused()) {
+         return false;
+      }
+
+      if (isDeferredConcepts()) {
+         return false;
+      }
+
+      if (isConceptLinksToSaveLater()) {
+         return false;
+      }
+
+      if (isDeferredValueSets()) {
+         return false;
+      }
+
+      if (isDeferredConceptMaps()) {
+         return false;
+      }
+
+      return true;
+   }
+
   protected abstract ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable);

   private Optional<TermValueSet> getNextTermValueSetNotExpanded() {
      Optional<TermValueSet> retVal = Optional.empty();
-      Slice<TermValueSet> page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED);
+      Slice<TermValueSet> page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED);

      if (!page.getContent().isEmpty()) {
         retVal = Optional.of(page.getContent().get(0));
@@ -103,10 +103,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
      }
      myValueSetConceptDao.save(concept);

-      if (myConceptsSaved++ % 250 == 0) {
+      // if (myConceptsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250.
         ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl());
-         myValueSetConceptDao.flush();
-      }
+         // myValueSetConceptDao.flush();
+      // }

      return concept;
   }
@@ -128,7 +128,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
      designation.setValue(theDesignation.getValue());
      myValueSetConceptDesignationDao.save(designation);

-      if (myDesignationsSaved++ % 250 == 0) {
+      if (myDesignationsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250.
         ourLog.info("Have pre-expanded {} designations in ValueSet[{}]", myDesignationsSaved, myTermValueSet.getUrl());
         myValueSetConceptDesignationDao.flush();
      }
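Both accumulator counters follow the same batching pattern: save each entity through its repository, then flush() every N saves so pending writes reach the database in manageable chunks instead of piling up in the persistence context (this work-in-progress commit temporarily drops N from 250 to 2 on the designation side and comments out the concept-side flush entirely, per the FIXMEs). A minimal sketch of the underlying pattern against a raw EntityManager; the wiring is illustrative, and the em.clear() step is a common companion that the accumulator here does not take:

import java.util.List;
import javax.persistence.EntityManager;

class PeriodicFlushSketch {
   private static final int FLUSH_INTERVAL = 250;

   void saveAll(EntityManager em, List<Object> theEntities) {
      int saved = 0;
      for (Object entity : theEntities) {
         em.persist(entity);
         if (++saved % FLUSH_INTERVAL == 0) {
            em.flush(); // push pending inserts to the database
            em.clear(); // detach flushed entities so the context stays small
         }
      }
   }
}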
@@ -606,6 +606,14 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
      loadAndPersistCodeSystem();
   }

+   @Test
+   public void testTest() {
+      ourLog.info("as is: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS);
+      ourLog.info("toString: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.toString());
+      ourLog.info("name: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.name());
+      ourLog.info("getCode: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.getCode());
+   }
+
   @Test
   public void testDuplicateConceptMapUrls() {
      createAndPersistConceptMap();
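Because TermValueSetPreExpansionStatusEnum does not override toString(), the first three lines of testTest() above all log the constant name; only getCode() produces the camel-case code that doubles as the message-bundle key suffix. Expected output:

as is: EXPANSION_IN_PROGRESS
toString: EXPANSION_IN_PROGRESS
name: EXPANSION_IN_PROGRESS
getCode: expansionInProgress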
@@ -1296,7 +1304,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
      assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
      assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
      assertEquals(0, termValueSet.getConcepts().size());
-      assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
+      assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
   });

   myTermSvc.preExpandValueSetToTerminologyTables();
@@ -1314,7 +1322,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
      assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
      assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
      assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size());
-      assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
+      assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());

      TermValueSetConcept concept = termValueSet.getConcepts().get(0);
      ourLog.info("Code:\n" + concept.toString());
@@ -1394,7 +1402,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
      assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
      assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
      assertEquals(0, termValueSet.getConcepts().size());
-      assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
+      assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus());
   });

   myTermSvc.preExpandValueSetToTerminologyTables();
@@ -1412,7 +1420,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
      assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
      assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
      assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size());
-      assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
+      assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());

      TermValueSetConcept concept = termValueSet.getConcepts().get(0);
      ourLog.info("Code:\n" + concept.toString());
@@ -79,6 +79,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
      resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
      resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");

+      // TermValueSetConceptDesignation
+      version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");
+      Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION");
+      termValueSetConceptDesignationTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
+      termValueSetConceptDesignationTable
+         .addForeignKey("FK_TRM_VSCD_VS_PID")
+         .toColumn("VALUESET_PID")
+         .references("TRM_VALUESET", "PID");
   }

   protected void init400() {