Merge remote-tracking branch 'origin/master' into mb-fix-history-prefetch
commit ae4dceeca2
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -530,8 +530,13 @@ public interface IValidationSupport {
 public abstract String getType();
 }

+// The reason these cannot be declared within an enum is because a Remote Terminology Service
+// can support arbitrary types. We do not restrict against the types in the spec.
+// Some of the types in the spec are not yet implemented as well.
+// @see https://github.com/hapifhir/hapi-fhir/issues/5700
 String TYPE_STRING = "string";
 String TYPE_CODING = "Coding";
+String TYPE_GROUP = "group";

 class StringConceptProperty extends BaseConceptProperty {
 private final String myValue;
@@ -589,6 +594,31 @@ public interface IValidationSupport {
 }
 }

+class GroupConceptProperty extends BaseConceptProperty {
+public GroupConceptProperty(String thePropertyName) {
+super(thePropertyName);
+}
+
+private List<BaseConceptProperty> subProperties;
+
+public BaseConceptProperty addSubProperty(BaseConceptProperty theProperty) {
+if (subProperties == null) {
+subProperties = new ArrayList<>();
+}
+subProperties.add(theProperty);
+return this;
+}
+
+public List<BaseConceptProperty> getSubProperties() {
+return subProperties != null ? subProperties : Collections.emptyList();
+}
+
+@Override
+public String getType() {
+return TYPE_GROUP;
+}
+}
+
 class CodeValidationResult {
 public static final String SOURCE_DETAILS = "sourceDetails";
 public static final String RESULT = "result";
@@ -871,8 +901,15 @@ public interface IValidationSupport {
 }
 }

+/**
+* Converts the current LookupCodeResult instance into a IBaseParameters instance which is returned
+* to the client of the $lookup operation.
+* @param theContext the FHIR context used for running the operation
+* @param thePropertyNamesToFilter the properties which are passed as parameter to filter the result.
+* @return the output for the lookup operation.
+*/
 public IBaseParameters toParameters(
-FhirContext theContext, List<? extends IPrimitiveType<String>> thePropertyNames) {
+FhirContext theContext, List<? extends IPrimitiveType<String>> thePropertyNamesToFilter) {

 IBaseParameters retVal = ParametersUtil.newInstance(theContext);
 if (isNotBlank(getCodeSystemDisplayName())) {
@@ -886,50 +923,29 @@ public interface IValidationSupport {

 if (myProperties != null) {

-Set<String> properties = Collections.emptySet();
-if (thePropertyNames != null) {
-properties = thePropertyNames.stream()
+final List<BaseConceptProperty> propertiesToReturn;
+if (thePropertyNamesToFilter != null && !thePropertyNamesToFilter.isEmpty()) {
+// TODO MM: The logic to filter of properties could actually be moved to the lookupCode provider.
+// That is where the rest of the lookupCode input parameter handling is done.
+// This was left as is for now but can be done with next opportunity.
+Set<String> propertyNameList = thePropertyNamesToFilter.stream()
 .map(IPrimitiveType::getValueAsString)
 .collect(Collectors.toSet());
+propertiesToReturn = myProperties.stream()
+.filter(p -> propertyNameList.contains(p.getPropertyName()))
+.collect(Collectors.toList());
+} else {
+propertiesToReturn = myProperties;
 }

-for (BaseConceptProperty next : myProperties) {
-String propertyName = next.getPropertyName();
-
-if (!properties.isEmpty() && !properties.contains(propertyName)) {
-continue;
-}
-
+for (BaseConceptProperty next : propertiesToReturn) {
 IBase property = ParametersUtil.addParameterToParameters(theContext, retVal, "property");
-ParametersUtil.addPartCode(theContext, property, "code", propertyName);
-
-String propertyType = next.getType();
-switch (propertyType) {
-case TYPE_STRING:
-StringConceptProperty stringConceptProperty = (StringConceptProperty) next;
-ParametersUtil.addPartString(
-theContext, property, "value", stringConceptProperty.getValue());
-break;
-case TYPE_CODING:
-CodingConceptProperty codingConceptProperty = (CodingConceptProperty) next;
-ParametersUtil.addPartCoding(
-theContext,
-property,
-"value",
-codingConceptProperty.getCodeSystem(),
-codingConceptProperty.getCode(),
-codingConceptProperty.getDisplay());
-break;
-default:
-throw new IllegalStateException(
-Msg.code(1739) + "Don't know how to handle " + next.getClass());
-}
+populateProperty(theContext, property, next);
 }
 }

 if (myDesignations != null) {
 for (ConceptDesignation next : myDesignations) {

 IBase property = ParametersUtil.addParameterToParameters(theContext, retVal, "designation");
 ParametersUtil.addPartCode(theContext, property, "language", next.getLanguage());
 ParametersUtil.addPartCoding(
@@ -941,6 +957,41 @@ public interface IValidationSupport {
 return retVal;
 }

+private void populateProperty(
+FhirContext theContext, IBase theProperty, BaseConceptProperty theConceptProperty) {
+ParametersUtil.addPartCode(theContext, theProperty, "code", theConceptProperty.getPropertyName());
+String propertyType = theConceptProperty.getType();
+switch (propertyType) {
+case TYPE_STRING:
+StringConceptProperty stringConceptProperty = (StringConceptProperty) theConceptProperty;
+ParametersUtil.addPartString(theContext, theProperty, "value", stringConceptProperty.getValue());
+break;
+case TYPE_CODING:
+CodingConceptProperty codingConceptProperty = (CodingConceptProperty) theConceptProperty;
+ParametersUtil.addPartCoding(
+theContext,
+theProperty,
+"value",
+codingConceptProperty.getCodeSystem(),
+codingConceptProperty.getCode(),
+codingConceptProperty.getDisplay());
+break;
+case TYPE_GROUP:
+GroupConceptProperty groupConceptProperty = (GroupConceptProperty) theConceptProperty;
+if (groupConceptProperty.getSubProperties().isEmpty()) {
+break;
+}
+groupConceptProperty.getSubProperties().forEach(p -> {
+IBase subProperty = ParametersUtil.addPart(theContext, theProperty, "subproperty", null);
+populateProperty(theContext, subProperty, p);
+});
+break;
+default:
+throw new IllegalStateException(
+Msg.code(1739) + "Don't know how to handle " + theConceptProperty.getClass());
+}
+}
+
 public void setErrorMessage(String theErrorMessage) {
 myErrorMessage = theErrorMessage;
 }
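For orientation (not part of the diff): a minimal sketch of how the new group property type could be assembled by code that builds $lookup results. The constructor argument orders for StringConceptProperty and CodingConceptProperty are assumptions; addSubProperty and getSubProperties come from the GroupConceptProperty class added above.

    // Assembled inside code that can see the IValidationSupport nested classes above.
    GroupConceptProperty group = new GroupConceptProperty("nested");

    // Assumed (name, value) constructor for StringConceptProperty.
    group.addSubProperty(new StringConceptProperty("publisher", "Example Publisher"));

    // Assumed (name, system, code, display) constructor for CodingConceptProperty.
    group.addSubProperty(new CodingConceptProperty("child", "http://example.org/cs", "child-1", "Child One"));

    // populateProperty(...) above emits this group as a "property" part whose
    // nested values appear as "subproperty" parts in the $lookup Parameters output.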
@@ -2387,13 +2387,19 @@ public enum Pointcut implements IPointcut {
 * <li>
 * ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent - Contains information about the from and to resources.
 * </li>
+* <li>
+* ca.uhn.fhir.mdm.model.mdmevents.MdmTransactionContext - Contains information about the Transaction context, e.g. merge or link.
+* </li>
 * </ul>
 * <p>
 * Hooks should return <code>void</code>.
 * </p>
 */
 MDM_POST_MERGE_GOLDEN_RESOURCES(
-void.class, "ca.uhn.fhir.rest.api.server.RequestDetails", "ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent"),
+void.class,
+"ca.uhn.fhir.rest.api.server.RequestDetails",
+"ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent",
+"ca.uhn.fhir.mdm.model.MdmTransactionContext"),

 /**
 * <b>MDM Link History Hook:</b>
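A hedged illustration (not part of the diff) of a hook that consumes the extra parameter now advertised by this pointcut; the interceptor class and method names are invented:

    import ca.uhn.fhir.interceptor.api.Hook;
    import ca.uhn.fhir.interceptor.api.Pointcut;
    import ca.uhn.fhir.mdm.model.MdmTransactionContext;
    import ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent;
    import ca.uhn.fhir.rest.api.server.RequestDetails;

    public class MyMdmMergeListener {

        @Hook(Pointcut.MDM_POST_MERGE_GOLDEN_RESOURCES)
        public void afterGoldenResourceMerge(
                RequestDetails theRequestDetails, MdmMergeEvent theEvent, MdmTransactionContext theMdmContext) {
            // theMdmContext carries the transaction context (e.g. merge or link) alongside
            // the from/to golden resources described by theEvent.
        }
    }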
@@ -26,13 +26,16 @@ import java.util.LinkedList;
 import java.util.NoSuchElementException;
 import java.util.function.Consumer;

+/**
+* This paging iterator only works with already ordered queries
+*/
 public class PagingIterator<T> implements Iterator<T> {

 public interface PageFetcher<T> {
 void fetchNextPage(int thePageIndex, int theBatchSize, Consumer<T> theConsumer);
 }

-static final int PAGE_SIZE = 100;
+static final int DEFAULT_PAGE_SIZE = 100;

 private int myPage;

@@ -42,8 +45,16 @@ public class PagingIterator<T> implements Iterator<T> {

 private final PageFetcher<T> myFetcher;

+private final int myPageSize;
+
 public PagingIterator(PageFetcher<T> theFetcher) {
+this(DEFAULT_PAGE_SIZE, theFetcher);
+}
+
+public PagingIterator(int thePageSize, PageFetcher<T> theFetcher) {
+assert thePageSize > 0 : "Page size must be a positive value";
 myFetcher = theFetcher;
+myPageSize = thePageSize;
 }

 @Override
@@ -66,9 +77,9 @@ public class PagingIterator<T> implements Iterator<T> {

 private void fetchNextBatch() {
 if (!myIsFinished && myCurrentBatch.isEmpty()) {
-myFetcher.fetchNextPage(myPage, PAGE_SIZE, myCurrentBatch::add);
+myFetcher.fetchNextPage(myPage, myPageSize, myCurrentBatch::add);
 myPage++;
-myIsFinished = myCurrentBatch.size() < PAGE_SIZE;
+myIsFinished = myCurrentBatch.size() < myPageSize;
 }
 }
 }
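A minimal usage sketch (not part of the diff) of the new page-size constructor; loadOrderedRows() is a hypothetical stand-in for any query that returns results in a stable order:

    List<String> allRows = loadOrderedRows(); // hypothetical, stable-ordered source
    PagingIterator<String> iterator = new PagingIterator<>(
            50,
            (thePageIndex, theBatchSize, theConsumer) -> allRows.stream()
                    .skip((long) thePageIndex * theBatchSize)
                    .limit(theBatchSize)
                    .forEach(theConsumer));

    while (iterator.hasNext()) {
        String next = iterator.next();
        // handle next
    }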
@@ -626,6 +626,16 @@ public class BundleBuilder {
 terser.setElement(myBundle, "Bundle.timestamp", theTimestamp.getValueAsString());
 }

+/**
+* Adds a profile URL to <code>Bundle.meta.profile</code>
+*
+* @since 7.4.0
+*/
+public void addProfile(String theProfile) {
+FhirTerser terser = myContext.newTerser();
+terser.addElement(myBundle, "Bundle.meta.profile", theProfile);
+}
+
 public class DeleteBuilder extends BaseOperationBuilder {

 // nothing yet
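A short usage sketch (not part of the diff); the profile URL is a placeholder:

    BundleBuilder builder = new BundleBuilder(FhirContext.forR4Cached());
    builder.addProfile("http://example.org/fhir/StructureDefinition/example-document"); // placeholder URL
    IBaseBundle bundle = builder.getBundle();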
@@ -39,6 +39,7 @@ import ca.uhn.fhir.util.bundle.SearchBundleEntryParts;
 import com.google.common.collect.Sets;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
+import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.tuple.Pair;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
@@ -59,6 +60,7 @@ import java.util.Set;
 import java.util.function.Consumer;
 import java.util.stream.Collectors;

+import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 import static org.hl7.fhir.instance.model.api.IBaseBundle.LINK_PREV;
@@ -67,6 +69,12 @@ import static org.hl7.fhir.instance.model.api.IBaseBundle.LINK_PREV;
 * Fetch resources from a bundle
 */
 public class BundleUtil {
+
+/** Non instantiable */
+private BundleUtil() {
+// nothing
+}
+
 private static final Logger ourLog = LoggerFactory.getLogger(BundleUtil.class);

 private static final String PREVIOUS = LINK_PREV;
@@ -339,6 +347,66 @@ public class BundleUtil {
 TerserUtil.setField(theContext, "entry", theBundle, retVal.toArray(new IBase[0]));
 }

+/**
+* Converts a Bundle containing resources into a FHIR transaction which
+* creates/updates the resources. This method does not modify the original
+* bundle, but returns a new copy.
+* <p>
+* This method is mostly intended for test scenarios where you have a Bundle
+* containing search results or other sourced resources, and want to upload
+* these resources to a server using a single FHIR transaction.
+* </p>
+* <p>
+* The Bundle is converted using the following logic:
+* <ul>
+* <li>Bundle.type is changed to <code>transaction</code></li>
+* <li>Bundle.request.method is changed to <code>PUT</code></li>
+* <li>Bundle.request.url is changed to <code>[resourceType]/[id]</code></li>
+* <li>Bundle.fullUrl is changed to <code>[resourceType]/[id]</code></li>
+* </ul>
+* </p>
+*
+* @param theContext The FhirContext to use with the bundle
+* @param theBundle The Bundle to modify. All resources in the Bundle should have an ID.
+* @param thePrefixIdsOrNull If not <code>null</code>, all resource IDs and all references in the Bundle will be
+* modified to such that their IDs contain the given prefix. For example, for a value
+* of "A", the resource "Patient/123" will be changed to be "Patient/A123". If set to
+* <code>null</code>, resource IDs are unchanged.
+* @since 7.4.0
+*/
+public static <T extends IBaseBundle> T convertBundleIntoTransaction(
+@Nonnull FhirContext theContext, @Nonnull T theBundle, @Nullable String thePrefixIdsOrNull) {
+String prefix = defaultString(thePrefixIdsOrNull);
+
+BundleBuilder bb = new BundleBuilder(theContext);
+
+FhirTerser terser = theContext.newTerser();
+List<IBase> entries = terser.getValues(theBundle, "Bundle.entry");
+for (var entry : entries) {
+IBaseResource resource = terser.getSingleValueOrNull(entry, "resource", IBaseResource.class);
+if (resource != null) {
+Validate.isTrue(resource.getIdElement().hasIdPart(), "Resource in bundle has no ID");
+String newId = theContext.getResourceType(resource) + "/" + prefix
++ resource.getIdElement().getIdPart();
+
+IBaseResource resourceClone = terser.clone(resource);
+resourceClone.setId(newId);
+
+if (isNotBlank(prefix)) {
+for (var ref : terser.getAllResourceReferences(resourceClone)) {
+var refElement = ref.getResourceReference().getReferenceElement();
+ref.getResourceReference()
+.setReference(refElement.getResourceType() + "/" + prefix + refElement.getIdPart());
+}
+}
+
+bb.addTransactionUpdateEntry(resourceClone);
+}
+}
+
+return bb.getBundleTyped();
+}
+
 private static void validatePartsNotNull(LinkedHashSet<IBase> theDeleteParts) {
 if (theDeleteParts == null) {
 throw new IllegalStateException(
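A usage sketch (not part of the diff); runSearch() is a hypothetical helper that returns a Bundle whose resources all carry IDs:

    FhirContext ctx = FhirContext.forR4Cached();
    IBaseBundle searchResults = runSearch(); // hypothetical source bundle
    IBaseBundle transaction = BundleUtil.convertBundleIntoTransaction(ctx, searchResults, "A");
    // "Patient/123" in the source bundle becomes a PUT to "Patient/A123" in the transaction bundle.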
@@ -433,7 +433,7 @@ public class ParametersUtil {
 addPart(theContext, theParameter, theName, coding);
 }

-public static void addPart(FhirContext theContext, IBase theParameter, String theName, IBase theValue) {
+public static IBase addPart(FhirContext theContext, IBase theParameter, String theName, @Nullable IBase theValue) {
 BaseRuntimeElementCompositeDefinition<?> def =
 (BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(theParameter.getClass());
 BaseRuntimeChildDefinition partChild = def.getChildByName("part");
@@ -448,12 +448,15 @@
 name.setValue(theName);
 partChildElem.getChildByName("name").getMutator().addValue(part, name);

+if (theValue != null) {
 if (theValue instanceof IBaseResource) {
 partChildElem.getChildByName("resource").getMutator().addValue(part, theValue);
 } else {
 partChildElem.getChildByName("value[x]").getMutator().addValue(part, theValue);
 }
 }
+return part;
+}

 public static void addPartResource(
 FhirContext theContext, IBase theParameter, String theName, IBaseResource theValue) {
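A short sketch (not part of the diff) of why the new return value matters: the created part can itself become the parent of further nested parts, which is what the "subproperty" rendering above relies on. Here ctx and parameters stand for an existing FhirContext and Parameters resource:

    IBase property = ParametersUtil.addParameterToParameters(ctx, parameters, "property");
    ParametersUtil.addPartCode(ctx, property, "code", "nested");

    // addPart(...) now returns the newly created part, so children can be attached to it.
    IBase subProperty = ParametersUtil.addPart(ctx, property, "subproperty", null);
    ParametersUtil.addPartCode(ctx, subProperty, "code", "child");
    ParametersUtil.addPartString(ctx, subProperty, "value", "child value");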
@@ -21,16 +21,20 @@ package ca.uhn.fhir.util;

 import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.i18n.Msg;
+import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
+import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;

 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.Set;
@@ -59,8 +63,12 @@ public class SearchParameterUtil {
 * 1. Attempt to find one called 'patient'
 * 2. If that fails, find one called 'subject'
 * 3. If that fails, find one by Patient Compartment.
-* 3.1 If that returns >1 result, throw an error
-* 3.2 If that returns 1 result, return it
+* 3.1 If that returns exactly 1 result then return it
+* 3.2 If that doesn't return exactly 1 result and is R4, fall to 3.3, otherwise, 3.5
+* 3.3 If that returns >1 result, throw an error
+* 3.4 If that returns 1 result, return it
+* 3.5 Find the search parameters by patient compartment using the R4 FHIR path, and return it if there is 1 result,
+* otherwise, fall to 3.3
 */
 public static Optional<RuntimeSearchParam> getOnlyPatientSearchParamForResourceType(
 FhirContext theFhirContext, String theResourceType) {
@@ -70,10 +78,40 @@ public class SearchParameterUtil {
 if (myPatientSearchParam == null) {
 myPatientSearchParam = runtimeResourceDefinition.getSearchParam("subject");
 if (myPatientSearchParam == null) {
-myPatientSearchParam = getOnlyPatientCompartmentRuntimeSearchParam(runtimeResourceDefinition);
+final List<RuntimeSearchParam> searchParamsForCurrentVersion =
+runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
+final List<RuntimeSearchParam> searchParamsToUse;
+// We want to handle a narrow code path in which attempting to process SearchParameters for a non-R4
+// resource would have failed, and instead make another attempt to process them with the R4-equivalent
+// FHIR path.
+if (FhirVersionEnum.R4 == theFhirContext.getVersion().getVersion()
+|| searchParamsForCurrentVersion.size() == 1) {
+searchParamsToUse = searchParamsForCurrentVersion;
+} else {
+searchParamsToUse =
+checkR4PatientCompartmentForMatchingSearchParam(runtimeResourceDefinition, theResourceType);
+}
+myPatientSearchParam =
+validateSearchParamsAndReturnOnlyOne(runtimeResourceDefinition, searchParamsToUse);
 }
 }
-return Optional.ofNullable(myPatientSearchParam);
+return Optional.of(myPatientSearchParam);
+}
+
+@Nonnull
+private static List<RuntimeSearchParam> checkR4PatientCompartmentForMatchingSearchParam(
+RuntimeResourceDefinition theRuntimeResourceDefinition, String theResourceType) {
+final RuntimeSearchParam patientSearchParamForR4 =
+FhirContext.forR4Cached().getResourceDefinition(theResourceType).getSearchParam("patient");
+
+return Optional.ofNullable(patientSearchParamForR4)
+.map(patientSearchParamForR4NonNull ->
+theRuntimeResourceDefinition.getSearchParamsForCompartmentName("Patient").stream()
+.filter(searchParam -> searchParam.getPath() != null)
+.filter(searchParam ->
+searchParam.getPath().equals(patientSearchParamForR4NonNull.getPath()))
+.collect(Collectors.toList()))
+.orElse(Collections.emptyList());
 }

 /**
@@ -94,7 +132,7 @@ public class SearchParameterUtil {
 if (mySubjectSearchParam != null) {
 searchParams.add(mySubjectSearchParam);
 }
-if (searchParams == null || searchParams.size() == 0) {
+if (CollectionUtils.isEmpty(searchParams)) {
 String errorMessage = String.format(
 "Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter",
 runtimeResourceDefinition.getId());
@@ -115,19 +153,34 @@

 public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(
 RuntimeResourceDefinition runtimeResourceDefinition) {
-RuntimeSearchParam patientSearchParam;
-List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
-if (searchParams == null || searchParams.size() == 0) {
+return validateSearchParamsAndReturnOnlyOne(
+runtimeResourceDefinition, runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"));
+}
+
+public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(
+RuntimeResourceDefinition runtimeResourceDefinition, List<RuntimeSearchParam> theSearchParams) {
+return validateSearchParamsAndReturnOnlyOne(runtimeResourceDefinition, theSearchParams);
+}
+
+@Nonnull
+private static RuntimeSearchParam validateSearchParamsAndReturnOnlyOne(
+RuntimeResourceDefinition theRuntimeResourceDefinition, List<RuntimeSearchParam> theSearchParams) {
+final RuntimeSearchParam patientSearchParam;
+if (CollectionUtils.isEmpty(theSearchParams)) {
 String errorMessage = String.format(
-"Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter",
-runtimeResourceDefinition.getId());
+"Resource type [%s] for ID [%s] and version: [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter",
+theRuntimeResourceDefinition.getName(),
+theRuntimeResourceDefinition.getId(),
+theRuntimeResourceDefinition.getStructureVersion());
 throw new IllegalArgumentException(Msg.code(1774) + errorMessage);
-} else if (searchParams.size() == 1) {
-patientSearchParam = searchParams.get(0);
+} else if (theSearchParams.size() == 1) {
+patientSearchParam = theSearchParams.get(0);
 } else {
 String errorMessage = String.format(
-"Resource type %s has more than one Search Param which references a patient compartment. We are unable to disambiguate which patient search parameter we should be searching by.",
-runtimeResourceDefinition.getId());
+"Resource type [%s] for ID [%s] and version: [%s] has more than one Search Param which references a patient compartment. We are unable to disambiguate which patient search parameter we should be searching by.",
+theRuntimeResourceDefinition.getName(),
+theRuntimeResourceDefinition.getId(),
+theRuntimeResourceDefinition.getStructureVersion());
 throw new IllegalArgumentException(Msg.code(1775) + errorMessage);
 }
 return patientSearchParam;
@@ -148,9 +201,8 @@

 public static Set<String> getAllResourceTypesThatAreInPatientCompartment(FhirContext theFhirContext) {
 return theFhirContext.getResourceTypes().stream()
-.filter(type -> getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type)
-.size()
-> 0)
+.filter(type -> CollectionUtils.isNotEmpty(
+getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type)))
 .collect(Collectors.toSet());
 }

@@ -165,9 +217,7 @@
 */
 public static boolean isResourceTypeInPatientCompartment(FhirContext theFhirContext, String theResourceType) {
 RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType);
-return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition)
-.size()
-> 0;
+return CollectionUtils.isNotEmpty(getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition));
 }

 @Nullable
@@ -62,7 +62,7 @@ public class PagingIteratorTest {
 public void next_fetchTest_fetchesAndReturns() {
 // 3 cases to make sure we get the edge cases
 for (int adj : new int[] { -1, 0, 1 }) {
-int size = PagingIterator.PAGE_SIZE + adj;
+int size = PagingIterator.DEFAULT_PAGE_SIZE + adj;

 myPagingIterator = createPagingIterator(size);

@@ -4,7 +4,7 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -19,6 +19,7 @@
 */
 package ca.uhn.fhir.cli;

+import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
@@ -31,6 +32,7 @@ import ca.uhn.fhir.system.HapiSystemProperties;
 import ca.uhn.fhir.util.AttachmentUtil;
 import ca.uhn.fhir.util.FileUtil;
 import ca.uhn.fhir.util.ParametersUtil;
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
@@ -265,7 +267,7 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand {
 "Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
 }

-private void addFileToRequestBundle(IBaseParameters theInputParameters, String theFileName, byte[] theBytes) {
+protected void addFileToRequestBundle(IBaseParameters theInputParameters, String theFileName, byte[] theBytes) {

 byte[] bytes = theBytes;
 String fileName = theFileName;
@@ -277,7 +279,7 @@
 FileUtil.formatFileSize(ourTransferSizeLimit));

 try {
-File tempFile = File.createTempFile("hapi-fhir-cli", suffix);
+File tempFile = File.createTempFile("hapi-fhir-cli", "." + suffix);
 tempFile.deleteOnExit();
 try (OutputStream fileOutputStream = new FileOutputStream(tempFile, false)) {
 fileOutputStream.write(bytes);
@@ -363,4 +365,9 @@
 }
 return retVal;
 }
+
+@VisibleForTesting
+void setFhirContext(FhirContext theFhirContext) {
+myFhirCtx = theFhirContext;
+}
 }
@@ -22,6 +22,10 @@ import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyS
 import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
 import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
 import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
+import org.hl7.fhir.instance.model.api.IBaseParameters;
+import org.hl7.fhir.r4.model.Attachment;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Type;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -43,6 +47,7 @@ import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.List;
+import java.util.Optional;
 import java.util.stream.Stream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
@@ -54,6 +59,8 @@ import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.matchesPattern;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyList;
@@ -479,6 +486,86 @@ public class UploadTerminologyCommandTest {
 uploadICD10UsingCompressedFile(theFhirVersion, theIncludeTls);
 }

+@ParameterizedTest
+@MethodSource("paramsProvider")
+@SuppressWarnings("unused") // Both params for @BeforeEach
+void testZipFileInParameters(String theFhirVersion, boolean theIncludeTls) {
+final IBaseParameters inputParameters = switch (myCtx.getVersion().getVersion()) {
+case DSTU2, DSTU2_HL7ORG, DSTU2_1 -> new org.hl7.fhir.dstu2.model.Parameters();
+case DSTU3 -> new org.hl7.fhir.dstu3.model.Parameters();
+case R4 -> new Parameters();
+case R4B -> new org.hl7.fhir.r4b.model.Parameters();
+case R5 -> new org.hl7.fhir.r5.model.Parameters();
+};
+
+final UploadTerminologyCommand uploadTerminologyCommand = new UploadTerminologyCommand();
+uploadTerminologyCommand.setFhirContext(myCtx);
+uploadTerminologyCommand.setTransferSizeBytes(1);
+
+uploadTerminologyCommand.addFileToRequestBundle(inputParameters, "something.zip", new byte[] {1,2});
+
+final String actualAttachmentUrl = getAttachmentUrl(inputParameters, myCtx);
+assertTrue(actualAttachmentUrl.endsWith(".zip"));
+}
+
+private static String getAttachmentUrl(IBaseParameters theInputParameters, FhirContext theCtx) {
+switch (theCtx.getVersion().getVersion()) {
+case DSTU2:
+case DSTU2_HL7ORG:
+case DSTU2_1: {
+assertInstanceOf(org.hl7.fhir.dstu2.model.Parameters.class, theInputParameters);
+final org.hl7.fhir.dstu2.model.Parameters dstu2Parameters = (org.hl7.fhir.dstu2.model.Parameters) theInputParameters;
+final List<org.hl7.fhir.dstu2.model.Parameters.ParametersParameterComponent> dstu2ParametersList = dstu2Parameters.getParameter();
+final Optional<org.hl7.fhir.dstu2.model.Parameters.ParametersParameterComponent> optDstu2FileParam = dstu2ParametersList.stream().filter(param -> TerminologyUploaderProvider.PARAM_FILE.equals(param.getName())).findFirst();
+assertTrue(optDstu2FileParam.isPresent());
+final org.hl7.fhir.dstu2.model.Type dstu2Value = optDstu2FileParam.get().getValue();
+assertInstanceOf(org.hl7.fhir.dstu2.model.Attachment.class, dstu2Value);
+final org.hl7.fhir.dstu2.model.Attachment dstu2Attachment = (org.hl7.fhir.dstu2.model.Attachment) dstu2Value;
+return dstu2Attachment.getUrl();
+}
+case DSTU3: {
+assertInstanceOf(org.hl7.fhir.dstu3.model.Parameters.class, theInputParameters);
+final org.hl7.fhir.dstu3.model.Parameters dstu3Parameters = (org.hl7.fhir.dstu3.model.Parameters) theInputParameters;
+final List<org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent> dstu3ParametersList = dstu3Parameters.getParameter();
+final Optional<org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent> optDstu3FileParam = dstu3ParametersList.stream().filter(param -> TerminologyUploaderProvider.PARAM_FILE.equals(param.getName())).findFirst();
+assertTrue(optDstu3FileParam.isPresent());
+final org.hl7.fhir.dstu3.model.Type dstu3Value = optDstu3FileParam.get().getValue();
+assertInstanceOf(org.hl7.fhir.dstu3.model.Attachment.class, dstu3Value);
+final org.hl7.fhir.dstu3.model.Attachment dstu3Attachment = (org.hl7.fhir.dstu3.model.Attachment) dstu3Value;
+return dstu3Attachment.getUrl();
+}
+case R4: {
+assertInstanceOf(Parameters.class, theInputParameters);
+final Parameters r4Parameters = (Parameters) theInputParameters;
+final Parameters.ParametersParameterComponent r4Parameter = r4Parameters.getParameter(TerminologyUploaderProvider.PARAM_FILE);
+final Type r4Value = r4Parameter.getValue();
+assertInstanceOf(Attachment.class, r4Value);
+final Attachment r4Attachment = (Attachment) r4Value;
+return r4Attachment.getUrl();
+}
+case R4B: {
+assertInstanceOf(org.hl7.fhir.r4b.model.Parameters.class, theInputParameters);
+final org.hl7.fhir.r4b.model.Parameters r4bParameters = (org.hl7.fhir.r4b.model.Parameters) theInputParameters;
+final org.hl7.fhir.r4b.model.Parameters.ParametersParameterComponent r4bParameter = r4bParameters.getParameter(TerminologyUploaderProvider.PARAM_FILE);
+final org.hl7.fhir.r4b.model.DataType value = r4bParameter.getValue();
+assertInstanceOf(org.hl7.fhir.r4b.model.Attachment.class, value);
+final org.hl7.fhir.r4b.model.Attachment r4bAttachment = (org.hl7.fhir.r4b.model.Attachment) value;
+return r4bAttachment.getUrl();
+}
+case R5: {
+assertInstanceOf(org.hl7.fhir.r5.model.Parameters.class, theInputParameters);
+final org.hl7.fhir.r5.model.Parameters r4Parameters = (org.hl7.fhir.r5.model.Parameters) theInputParameters;
+final org.hl7.fhir.r5.model.Parameters.ParametersParameterComponent parameter = r4Parameters.getParameter(TerminologyUploaderProvider.PARAM_FILE);
+final org.hl7.fhir.r5.model.DataType value = parameter.getValue();
+assertInstanceOf(org.hl7.fhir.r5.model.Attachment.class, value);
+final org.hl7.fhir.r5.model.Attachment attachment = (org.hl7.fhir.r5.model.Attachment) value;
+return attachment.getUrl();
+}
+default:
+throw new IllegalStateException("Unknown FHIR version: " + theCtx.getVersion().getVersion());
+}
+}
+
 private void uploadICD10UsingCompressedFile(String theFhirVersion, boolean theIncludeTls) throws IOException {
 if (FHIR_VERSION_DSTU3.equals(theFhirVersion)) {
 when(myTermLoaderSvc.loadIcd10cm(anyList(), any())).thenReturn(new UploadStatistics(100, new org.hl7.fhir.dstu3.model.IdType("CodeSystem/101")));
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.0-SNAPSHOT</version>
+<version>7.3.2-SNAPSHOT</version>

 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@ -0,0 +1,10 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5745
|
||||||
|
title: "Added another state to the Batch2 work chunk state machine: `READY`.
|
||||||
|
This work chunk state will be the initial state on creation.
|
||||||
|
Once queued for delivery, they will transition to `QUEUED`.
|
||||||
|
The exception is for ReductionStep chunks (because reduction steps
|
||||||
|
are not read off of the queue, but executed by the maintenance job
|
||||||
|
inline.
|
||||||
|
"
|
|
@ -0,0 +1,9 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5767
|
||||||
|
title: "Added new `POLL_WAITING` state for WorkChunks in batch jobs.
|
||||||
|
Also added RetryChunkLaterException for jobs that have steps that
|
||||||
|
need to be retried at a later time (can be provided optionally to exception).
|
||||||
|
If a step throws this new exception, it will be set with the new
|
||||||
|
`POLL_WAITING` status and retried at a later time.
|
||||||
|
"
|
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5818
|
||||||
|
title: "Added another state to the Batch2 work chunk state machine: `GATE_WAITING`.
|
||||||
|
This work chunk state will be the initial state on creation for gated jobs.
|
||||||
|
Once all chunks are completed for the previous step, they will transition to `READY`.
|
||||||
|
"
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5861
|
||||||
|
title: "Enhance RuleBuilder code to support multiple instance IDs."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5865
|
||||||
|
title: "Moving the Hibernate.Search annotation for text indexing from the lob column to the column added as part of the
|
||||||
|
PostgreSql LOB migration."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5877
|
||||||
|
title: "Previously, updating a tokenParam with a value greater than 200 characters would raise a SQLException.
|
||||||
|
This issue has been fixed."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5886
|
||||||
|
title: "Previously, either updating links on, or deleting one of two patients with non-numeric IDs linked to a golden
|
||||||
|
patient would result in a HAPI-0389 if there were survivorship rules.
|
||||||
|
This issue has been fixed for both the update links and delete cases."
|
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5888
|
||||||
|
title: "Updated documentation on binary_security_interceptor to specify using
|
||||||
|
`STORAGE_PRE_INITIATE_BULK_EXPORT` not `STORAGE_INITIATE_BULK_EXPORT` pointcut
|
||||||
|
to change bulk export parameters.
|
||||||
|
"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5890
|
||||||
|
title: "As part of the migration from LOB, provided the capability to force persisting data to LOB columns. The default
|
||||||
|
behavior is to not persist in lob columns."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5893
|
||||||
|
title: "Previously, hapi-fhir-cli: upload-terminology failed with a HAPI-0862 error when uploading LOINC.
|
||||||
|
This has been fixed."
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5898
|
||||||
|
title: "Previously, triggering a `$meta` via GET on a new patient with Megascale configured resulted in error HAPI-0389. This has been corrected
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5899
|
||||||
|
title: "The `MDM_POST_MERGE_GOLDEN_RESOURCES` now supports an additional parameter, of type `ca.uhn.fhir.mdm.model.MdmTransactionContext`. Thanks to Jens Villadsen for the contribution."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5904
|
||||||
|
title: "Chained sort would exclude results that did not have resources matching the sort chain. These are now included, and sorted at the end."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5915
|
||||||
|
title: "Previously, in some edge case scenarios the Bulk Export Rule Applier could accidentally permit a Patient type level bulk export request, even if the calling user only had permissions to a subset of patients. This has been corrected."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5917
|
||||||
|
title: "Fix chained sorts on strings when using MS Sql"
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 3986
|
||||||
|
title: "Removed the validation error of the `max` query parameter on the $lastn operation. Now, the $lastn operation can be invoked with the `max` query parameter. Contribution by Gijs Groenewegen (@thetrueoneshots)."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 4556
|
||||||
|
title: "The CSS file used by the OpenApiInterceptor to serve up the Swagger UI
|
||||||
|
component inadvertently blocked the authorization button evem when it was
|
||||||
|
wanted. This has been fixed. Thanks Jesse Bonzo for the contribution!"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5900
|
||||||
|
jira: SMILE-7435
|
||||||
|
title: "Remove unnecessary call to deleteAllSearchParams when $expunge is called on an already-deleted resource."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5913
|
||||||
|
title: "Bulk export was failing with a HAPI-2222 error with the persistence module configured for R5.
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5925
|
||||||
|
title: "An UnsupportedOperationException occurred when validating R5 MHD bundles using
|
||||||
|
the HAPI FHIR validator. Thanks to Renaud Subiger for contributing a fix!"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5933
|
||||||
|
title: "Bulk export was failing with a HAPI-1775 error with the persistence module configured for DSTU3.
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5935
|
||||||
|
title: "Remote Terminology Service can now return subproperty fields for a CodeSystem lookup operation.
|
||||||
|
This is supported for DSTU3 and R4; R5 is not yet implemented."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5938
|
||||||
|
title: "Generated IPS documents will now include a bundle profile declaration."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5945
|
||||||
|
title: "A new utility method has been added to `BundleUtil` which converts a FHIR Bundle
|
||||||
|
containing resources (e.g. a search result bundle) into a FHIR transaction bundle which
|
||||||
|
can be used to upload those resources to a server."
|
|
@ -14,4 +14,9 @@ This interceptor is intended to be subclassed. A simple example is shown below:
|
||||||
|
|
||||||
## Combining with Bulk Export
|
## Combining with Bulk Export
|
||||||
|
|
||||||
The `setBinarySecurityContextIdentifierSystem(..)` and `setBinarySecurityContextIdentifierValue(..)` properties on the `BulkExportJobParameters` object can be used to automatically populate the security context on Binary resources created by Bulk Export jobs with values that can be verified by this interceptor. An interceptor on the `STORAGE_INITIATE_BULK_EXPORT` pointcut is the easiest way to set these properties when a new Bulk Export job is being kicked off.
|
The `setBinarySecurityContextIdentifierSystem(..)` and `setBinarySecurityContextIdentifierValue(..)` properties on the `BulkExportJobParameters` object can be used to automatically populate the security context on Binary resources created by Bulk Export jobs with values that can be verified by this interceptor.
|
||||||
|
An interceptor on the `STORAGE_PRE_INITIATE_BULK_EXPORT` pointcut is the recommended way to set these properties when a new Bulk Export job is being kicked off.
|
||||||
|
|
||||||
|
NB: Previous versions recommended using the `STORAGE_INITIATE_BULK_EXPORT` pointcut, but this is no longer recommended.
|
||||||
|
The `STORAGE_PRE_INITIATE_BULK_EXPORT` pointcut is called before `STORAGE_INITIATE_BULK_EXPORT` and is thus guaranteed to be called before
|
||||||
|
any AuthorizationInterceptors.
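A minimal sketch of such an interceptor is shown below. The hook method's parameter list and the identifier system/value are illustrative assumptions, not values mandated by HAPI FHIR:

```java
@Interceptor
public class BulkExportSecurityContextInterceptor {

	@Hook(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT)
	public void preInitiateBulkExport(BulkExportJobParameters theParameters) {
		// Stamp the security context that Binary resources produced by this export will
		// carry, so that a BinarySecurityContextInterceptor subclass can verify it later.
		theParameters.setBinarySecurityContextIdentifierSystem("http://example.org/bulk-export-context");
		theParameters.setBinarySecurityContextIdentifierValue("tenant-a");
	}
}
```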
|
||||||
|
|
|
@ -47,15 +47,25 @@ stateDiagram-v2
|
||||||
title: Batch2 Job Work Chunk state transitions
|
title: Batch2 Job Work Chunk state transitions
|
||||||
---
|
---
|
||||||
stateDiagram-v2
|
stateDiagram-v2
|
||||||
|
state GATE_WAITING
|
||||||
|
state READY
|
||||||
|
state REDUCTION_READY
|
||||||
state QUEUED
|
state QUEUED
|
||||||
state on_receive <<choice>>
|
state on_receive <<choice>>
|
||||||
state IN_PROGRESS
|
state IN_PROGRESS
|
||||||
state ERROR
|
state ERROR
|
||||||
|
state POLL_WAITING
|
||||||
state execute <<choice>>
|
state execute <<choice>>
|
||||||
state FAILED
|
state FAILED
|
||||||
state COMPLETED
|
state COMPLETED
|
||||||
direction LR
|
direction LR
|
||||||
[*] --> QUEUED : on create
|
[*] --> READY : on create - normal jobs, or gated jobs' first-step chunks
|
||||||
|
[*] --> GATE_WAITING : on create - gated jobs, all chunks except the first step's
|
||||||
|
GATE_WAITING --> READY : on gate release - gated
|
||||||
|
GATE_WAITING --> REDUCTION_READY : on gate release for the final reduction step (all reduction jobs are gated)
|
||||||
|
QUEUED --> READY : on gate release - gated (for compatibility with the legacy QUEUED state up to HAPI FHIR version 7.1)
|
||||||
|
READY --> QUEUED : placed on Kafka (maint.)
|
||||||
|
POLL_WAITING --> READY : after a poll delay on a POLL_WAITING work chunk has elapsed
|
||||||
|
|
||||||
%% worker processing states
|
%% worker processing states
|
||||||
QUEUED --> on_receive : on dequeue by worker
|
QUEUED --> on_receive : on dequeue by worker
|
||||||
|
@ -65,6 +75,7 @@ stateDiagram-v2
|
||||||
execute --> ERROR : on re-triable error
|
execute --> ERROR : on re-triable error
|
||||||
execute --> COMPLETED : success\n maybe trigger instance first_step_finished
|
execute --> COMPLETED : success\n maybe trigger instance first_step_finished
|
||||||
execute --> FAILED : on unrecoverable \n or too many errors
|
execute --> FAILED : on unrecoverable \n or too many errors
|
||||||
|
execute --> POLL_WAITING : job step has thrown a RetryChunkLaterException and must be tried again after the provided poll delay
|
||||||
|
|
||||||
%% temporary error state until retry
|
%% temporary error state until retry
|
||||||
ERROR --> on_receive : exception rollback\n triggers redelivery
|
ERROR --> on_receive : exception rollback\n triggers redelivery
|
||||||
|
|
|
@ -19,36 +19,54 @@ A HAPI-FHIR batch job definition consists of a job name, version, parameter json
|
||||||
After a job has been defined, *instances* of that job can be submitted for batch processing by populating a `JobInstanceStartRequest` with the job name and job parameters json and then submitting that request to the Batch Job Coordinator.
|
After a job has been defined, *instances* of that job can be submitted for batch processing by populating a `JobInstanceStartRequest` with the job name and job parameters json and then submitting that request to the Batch Job Coordinator.
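A rough sketch of such a submission is shown below; the coordinator bean and setter names are assumptions written for illustration rather than an exact API reference:

```java
// Illustrative fragment (assumes an injected IJobCoordinator named myJobCoordinator).
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId("MY_JOB_DEFINITION_ID"); // the job name used when the definition was registered
startRequest.setParameters(myJobParameters);             // serialized to the parameters JSON
Batch2JobStartResponse response = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest);
```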
|
||||||
|
|
||||||
The Batch Job Coordinator will then store two records in the database:
|
The Batch Job Coordinator will then store two records in the database:
|
||||||
- Job Instance with status QUEUED: that is the parent record for all data concerning this job
|
- Job Instance with status `QUEUED`: that is the parent record for all data concerning this job
|
||||||
- Batch Work Chunk with status QUEUED: this describes the first "chunk" of work required for this job. The first Batch Work Chunk contains no data.
|
- Batch Work Chunk with status `READY`: this describes the first "chunk" of work required for this job. The first Batch Work Chunk contains no data.
|
||||||
|
|
||||||
Lastly the Batch Job Coordinator publishes a message to the Batch Notification Message Channel (named `batch2-work-notification`) to inform worker threads that this first chunk of work is now ready for processing.
|
### The Maintenance Job
|
||||||
|
|
||||||
### Job Processing - First Step
|
A Scheduled Job runs periodically (once a minute). For each Job Instance in the database, it:
|
||||||
|
|
||||||
HAPI-FHIR Batch Jobs run based on job notification messages. The process is kicked off by the first chunk of work. When this notification message arrives, the message handler makes a single call to the first step defined in the job definition, passing in the job parameters as input.
|
1. Calculates job progress (% of work chunks in `COMPLETE` status). If the job is finished, purges any left over work chunks still in the database.
|
||||||
|
1. Moves all `POLL_WAITING` work chunks to `READY` if their `nextPollTime` has expired.
|
||||||
|
1. Calculates job progress (% of work chunks in `COMPLETE` status). If the job is finished, purges any leftover work chunks still in the database.
|
||||||
|
1. Cleans up any complete, failed, or cancelled jobs that need to be removed.
|
||||||
|
1. When the current step is complete, moves any gated jobs onto their next step and updates all chunks in `GATE_WAITING` to `READY`. If the job is being moved to its final reduction step, chunks are moved from `GATE_WAITING` to `REDUCTION_READY`.
|
||||||
|
1. If the final step of a gated job is a reduction step, a reduction step execution will be triggered. All work chunks for the job in `REDUCTION_READY` will be consumed at this point.
|
||||||
|
1. Moves all `READY` work chunks into the `QUEUED` state and publishes a message to the Batch Notification Message Channel to inform worker threads that a work chunk is now ready for processing. \*
|
||||||
|
|
||||||
The handler then does the following:
|
\* The exception is the final reduction step, where work chunks are not published to the Batch Notification Message Channel,
|
||||||
1. Change the work chunk status from QUEUED to IN_PROGRESS
|
but instead processed inline.
|
||||||
2. Change the Job Instance status from QUEUED to IN_PROGRESS
|
|
||||||
3. If the Job Instance is cancelled, change the status to CANCELLED and abort processing.
|
|
||||||
4. The first step of the job definition is executed with the job parameters
|
|
||||||
5. This step creates new work chunks. For each work chunk it creates, it json serializes the work chunk data, stores it in the database, and publishes a new message to the Batch Notification Message Channel to notify worker threads that there are new work chunks waiting to be processed.
|
|
||||||
6. If the step succeeded, the work chunk status is changed from IN_PROGRESS to COMPLETED, and the data it contained is deleted.
|
|
||||||
7. If the step failed, the work chunk status is changed from IN_PROGRESS to either ERRORED or FAILED depending on the severity of the error.
|
|
||||||
|
|
||||||
### Job Processing - Middle steps
|
### Batch Notification Message Handler
|
||||||
|
|
||||||
Middle Steps in the job definition are executed in the same way, except instead of only using the Job Parameters as input, they use both the Job Parameters and the Work Chunk data produced from the previous step.
|
HAPI-FHIR Batch Jobs run based on job notification messages delivered on the Batch Notification Message Channel (named `batch2-work-notification`).
|
||||||
|
|
||||||
### Job Processing - Final Step
|
When a notification message arrives, the handler does the following:
|
||||||
|
|
||||||
|
1. Change the work chunk status from `QUEUED` to `IN_PROGRESS`
|
||||||
|
1. Change the Job Instance status from `QUEUED` to `IN_PROGRESS`
|
||||||
|
1. If the Job Instance is cancelled, change the status to `CANCELLED` and abort processing
|
||||||
|
1. If the step creates new work chunks, each work chunk will be created in either the `GATE_WAITING` state (for gated jobs) or `READY` state (for non-gated jobs) and will be handled in the next maintenance job pass.
|
||||||
|
1. If the step succeeds, the work chunk status is changed from `IN_PROGRESS` to `COMPLETED`, and the data it contained is deleted.
|
||||||
|
1. If the step throws a `RetryChunkLaterException`, the work chunk status is changed from `IN_PROGRESS` to `POLL_WAITING`, and a `nextPollTime` value will be set (see the sketch after this list).
|
||||||
|
1. If the step fails, the work chunk status is changed from `IN_PROGRESS` to either `ERRORED` or `FAILED`, depending on the severity of the error.
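An illustrative sketch of the poll-delay path above: a step worker that asks to be re-run later rather than failing. Type names prefixed with `My` are placeholders, and the exception's constructor argument is an assumption:

```java
public class PollingStepWorker implements IJobStepWorker<MyJobParameters, MyInputType, MyOutputType> {

	@Nonnull
	@Override
	public RunOutcome run(
			@Nonnull StepExecutionDetails<MyJobParameters, MyInputType> theDetails,
			@Nonnull IJobDataSink<MyOutputType> theDataSink) {

		if (!upstreamSystemIsReady()) {
			// The chunk is moved to POLL_WAITING and re-run after the requested delay.
			throw new RetryChunkLaterException(Duration.ofMinutes(1));
		}
		// ... do the real work and emit output via theDataSink ...
		return RunOutcome.SUCCESS;
	}

	private boolean upstreamSystemIsReady() {
		return true; // placeholder check
	}
}
```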
|
||||||
|
|
||||||
|
### First Step
|
||||||
|
|
||||||
|
The first step in a job definition is executed with just the job parameters.
|
||||||
|
|
||||||
|
### Middle steps
|
||||||
|
|
||||||
|
Middle Steps in the job definition are executed using the initial Job Parameters and the Work Chunk data produced from the previous step.
|
||||||
|
|
||||||
|
### Final Step
|
||||||
|
|
||||||
The final step operates the same way as the middle steps, except it does not produce any new work chunks.
|
The final step operates the same way as the middle steps, except it does not produce any new work chunks.
|
||||||
|
|
||||||
### Gated Execution
|
### Gated Execution
|
||||||
|
|
||||||
If a Job Definition is set to having Gated Execution, then all work chunks for one step must be COMPLETED before any work chunks for the next step may begin.
|
If a Job Definition is set to use Gated Execution, then all work chunks for a step must be `COMPLETED` before any work chunks for the next step may begin.
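Gating is declared on the job definition itself. A rough sketch is shown below; the builder method names are assumptions, and placeholder types are prefixed with `My`:

```java
// Illustrative only: marks the definition as gated, so every chunk of a step must
// be COMPLETED before the next step's chunks are released by the maintenance pass.
JobDefinition<MyJobParameters> definition = JobDefinition.newBuilder()
		.setJobDefinitionId("MY_GATED_JOB")
		.setJobDefinitionVersion(1)
		.setJobDescription("Example gated job")
		.setParametersType(MyJobParameters.class)
		.gatedExecution()
		.addFirstStep("step-1", "Produce work chunks", MyStepOutput.class, new FirstStepWorker())
		.addLastStep("step-2", "Consume work chunks", new LastStepWorker())
		.build();
```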
|
||||||
|
|
||||||
### Job Instance Completion
|
### Job Instance Completion
|
||||||
|
|
||||||
A Batch Job Maintenance Service runs every minute to monitor the status of all Job Instances and the Job Instance is transitioned to either COMPLETED, ERRORED or FAILED according to the status of all outstanding work chunks for that job instance. If the job instance is still IN_PROGRESS this maintenance service also estimates the time remaining to complete the job.
|
A Batch Job Maintenance Service runs every minute to monitor the status of all Job Instances, and each Job Instance is transitioned to either `COMPLETED`, `ERRORED` or `FAILED` according to the status of all outstanding work chunks for that job instance. If the job instance is still `IN_PROGRESS`, this maintenance service also estimates the time remaining to complete the job.
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.3.0-SNAPSHOT</version>
|
<version>7.3.2-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.3.0-SNAPSHOT</version>
|
<version>7.3.2-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.3.0-SNAPSHOT</version>
|
<version>7.3.2-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -37,6 +37,17 @@ public interface IHapiScheduler {
|
||||||
|
|
||||||
void logStatusForUnitTest();
|
void logStatusForUnitTest();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pauses this scheduler (and thus all scheduled jobs).
|
||||||
|
* To restart call {@link #unpause()}
|
||||||
|
*/
|
||||||
|
void pause();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Restarts this scheduler after {@link #pause()}
|
||||||
|
*/
|
||||||
|
void unpause();
|
||||||
|
|
||||||
void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition);
|
void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition);
|
||||||
|
|
||||||
Set<JobKey> getJobKeysForUnitTest() throws SchedulerException;
|
Set<JobKey> getJobKeysForUnitTest() throws SchedulerException;
|
||||||
|
|
|
@ -32,6 +32,20 @@ public interface ISchedulerService {
|
||||||
|
|
||||||
void logStatusForUnitTest();
|
void logStatusForUnitTest();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pauses the scheduler so no new jobs will run.
|
||||||
|
* Useful in tests when cleanup needs to happen but scheduled jobs may
|
||||||
|
* be running
|
||||||
|
*/
|
||||||
|
@VisibleForTesting
|
||||||
|
void pause();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Restarts the scheduler after a previous call to {@link #pause()}.
|
||||||
|
*/
|
||||||
|
@VisibleForTesting
|
||||||
|
void unpause();
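	// Example (illustrative) test usage, assuming an injected ISchedulerService:
	//
	//   mySchedulerService.pause();
	//   try {
	//       // perform cleanup that must not race with scheduled jobs
	//   } finally {
	//       mySchedulerService.unpause();
	//   }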
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This task will execute locally (and should execute on all nodes of the cluster if there is a cluster)
|
* This task will execute locally (and should execute on all nodes of the cluster if there is a cluster)
|
||||||
* @param theIntervalMillis How many milliseconds between passes should this job run
|
* @param theIntervalMillis How many milliseconds between passes should this job run
|
||||||
|
@ -52,6 +66,9 @@ public interface ISchedulerService {
|
||||||
@VisibleForTesting
|
@VisibleForTesting
|
||||||
Set<JobKey> getClusteredJobKeysForUnitTest() throws SchedulerException;
|
Set<JobKey> getClusteredJobKeysForUnitTest() throws SchedulerException;
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
boolean isSchedulingDisabled();
|
||||||
|
|
||||||
boolean isStopping();
|
boolean isStopping();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -29,6 +29,7 @@ import com.google.common.collect.Sets;
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
import org.quartz.JobDataMap;
|
import org.quartz.JobDataMap;
|
||||||
|
import org.quartz.JobExecutionContext;
|
||||||
import org.quartz.JobKey;
|
import org.quartz.JobKey;
|
||||||
import org.quartz.ScheduleBuilder;
|
import org.quartz.ScheduleBuilder;
|
||||||
import org.quartz.Scheduler;
|
import org.quartz.Scheduler;
|
||||||
|
@ -44,11 +45,14 @@ import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
|
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||||
|
|
||||||
public abstract class BaseHapiScheduler implements IHapiScheduler {
|
public abstract class BaseHapiScheduler implements IHapiScheduler {
|
||||||
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiScheduler.class);
|
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiScheduler.class);
|
||||||
|
|
||||||
|
@ -151,6 +155,42 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void pause() {
|
||||||
|
int delay = 100;
|
||||||
|
String errorMsg = null;
|
||||||
|
Throwable ex = null;
|
||||||
|
try {
|
||||||
|
int count = 0;
|
||||||
|
myScheduler.standby();
|
||||||
|
while (count < 3) {
|
||||||
|
if (!hasRunningJobs()) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Thread.sleep(delay);
|
||||||
|
count++;
|
||||||
|
}
|
||||||
|
if (count >= 3) {
|
||||||
|
errorMsg = "Scheduler on standby. But after " + (count + 1) * delay
|
||||||
|
+ " ms there are still jobs running. Execution will continue, but may cause bugs.";
|
||||||
|
}
|
||||||
|
} catch (Exception x) {
|
||||||
|
ex = x;
|
||||||
|
errorMsg = "Failed to set to standby. Execution will continue, but may cause bugs.";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isNotBlank(errorMsg)) {
|
||||||
|
if (ex != null) {
|
||||||
|
ourLog.warn(errorMsg, ex);
|
||||||
|
} else {
|
||||||
|
ourLog.warn(errorMsg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void unpause() {
|
||||||
|
start();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void clear() throws SchedulerException {
|
public void clear() throws SchedulerException {
|
||||||
myScheduler.clear();
|
myScheduler.clear();
|
||||||
|
@ -168,6 +208,16 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private boolean hasRunningJobs() {
|
||||||
|
try {
|
||||||
|
List<JobExecutionContext> currentlyExecutingJobs = myScheduler.getCurrentlyExecutingJobs();
|
||||||
|
ourLog.info("Checking for running jobs. Found {} running.", currentlyExecutingJobs);
|
||||||
|
return !currentlyExecutingJobs.isEmpty();
|
||||||
|
} catch (SchedulerException ex) {
|
||||||
|
throw new RuntimeException(Msg.code(2521) + " Failed during check for scheduled jobs", ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
||||||
Validate.isTrue(theIntervalMillis >= 100);
|
Validate.isTrue(theIntervalMillis >= 100);
|
||||||
|
|
|
@ -136,7 +136,7 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService {
|
||||||
return retval;
|
return retval;
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean isSchedulingDisabled() {
|
public boolean isSchedulingDisabled() {
|
||||||
return !isLocalSchedulingEnabled() || isSchedulingDisabledForUnitTests();
|
return !isLocalSchedulingEnabled() || isSchedulingDisabledForUnitTests();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -198,6 +198,18 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService {
|
||||||
myClusteredScheduler.logStatusForUnitTest();
|
myClusteredScheduler.logStatusForUnitTest();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void pause() {
|
||||||
|
myLocalScheduler.pause();
|
||||||
|
myClusteredScheduler.pause();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void unpause() {
|
||||||
|
myLocalScheduler.unpause();
|
||||||
|
myClusteredScheduler.unpause();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void scheduleLocalJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
public void scheduleLocalJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
||||||
scheduleJob("local", myLocalScheduler, theIntervalMillis, theJobDefinition);
|
scheduleJob("local", myLocalScheduler, theIntervalMillis, theJobDefinition);
|
||||||
|
|
|
@ -53,6 +53,16 @@ public class HapiNullScheduler implements IHapiScheduler {
|
||||||
@Override
|
@Override
|
||||||
public void logStatusForUnitTest() {}
|
public void logStatusForUnitTest() {}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void pause() {
|
||||||
|
// nothing to do
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void unpause() {
|
||||||
|
// nothing to do
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
|
||||||
ourLog.debug("Skipping scheduling job {} since scheduling is disabled", theJobDefinition.getId());
|
ourLog.debug("Skipping scheduling job {} since scheduling is disabled", theJobDefinition.getId());
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.3.0-SNAPSHOT</version>
|
<version>7.3.2-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -123,6 +123,8 @@ class JobInstanceUtil {
|
||||||
retVal.setErrorMessage(theEntity.getErrorMessage());
|
retVal.setErrorMessage(theEntity.getErrorMessage());
|
||||||
retVal.setErrorCount(theEntity.getErrorCount());
|
retVal.setErrorCount(theEntity.getErrorCount());
|
||||||
retVal.setRecordsProcessed(theEntity.getRecordsProcessed());
|
retVal.setRecordsProcessed(theEntity.getRecordsProcessed());
|
||||||
|
retVal.setNextPollTime(theEntity.getNextPollTime());
|
||||||
|
retVal.setPollAttempts(theEntity.getPollAttempts());
|
||||||
// note: may be nulled out if queried with NoData
|
// note: may be nulled out if queried with NoData
|
||||||
retVal.setData(theEntity.getSerializedData());
|
retVal.setData(theEntity.getSerializedData());
|
||||||
retVal.setWarningMessage(theEntity.getWarningMessage());
|
retVal.setWarningMessage(theEntity.getWarningMessage());
|
||||||
|
|
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.config.BaseBatch2Config;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
|
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
|
||||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkMetadataViewRepository;
|
||||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||||
import jakarta.persistence.EntityManager;
|
import jakarta.persistence.EntityManager;
|
||||||
|
@ -39,12 +40,14 @@ public class JpaBatch2Config extends BaseBatch2Config {
|
||||||
public IJobPersistence batch2JobInstancePersister(
|
public IJobPersistence batch2JobInstancePersister(
|
||||||
IBatch2JobInstanceRepository theJobInstanceRepository,
|
IBatch2JobInstanceRepository theJobInstanceRepository,
|
||||||
IBatch2WorkChunkRepository theWorkChunkRepository,
|
IBatch2WorkChunkRepository theWorkChunkRepository,
|
||||||
|
IBatch2WorkChunkMetadataViewRepository theWorkChunkMetadataViewRepo,
|
||||||
IHapiTransactionService theTransactionService,
|
IHapiTransactionService theTransactionService,
|
||||||
EntityManager theEntityManager,
|
EntityManager theEntityManager,
|
||||||
IInterceptorBroadcaster theInterceptorBroadcaster) {
|
IInterceptorBroadcaster theInterceptorBroadcaster) {
|
||||||
return new JpaJobPersistenceImpl(
|
return new JpaJobPersistenceImpl(
|
||||||
theJobInstanceRepository,
|
theJobInstanceRepository,
|
||||||
theWorkChunkRepository,
|
theWorkChunkRepository,
|
||||||
|
theWorkChunkMetadataViewRepo,
|
||||||
theTransactionService,
|
theTransactionService,
|
||||||
theEntityManager,
|
theEntityManager,
|
||||||
theInterceptorBroadcaster);
|
theInterceptorBroadcaster);
|
||||||
|
|
|
@ -28,16 +28,19 @@ import ca.uhn.fhir.batch2.model.WorkChunk;
|
||||||
import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent;
|
import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent;
|
||||||
import ca.uhn.fhir.batch2.model.WorkChunkCreateEvent;
|
import ca.uhn.fhir.batch2.model.WorkChunkCreateEvent;
|
||||||
import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent;
|
import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent;
|
||||||
|
import ca.uhn.fhir.batch2.model.WorkChunkMetadata;
|
||||||
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
|
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
|
||||||
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
|
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
|
||||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkMetadataViewRepository;
|
||||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||||
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
||||||
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
||||||
|
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkMetadataView;
|
||||||
import ca.uhn.fhir.model.api.PagingIterator;
|
import ca.uhn.fhir.model.api.PagingIterator;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||||
|
@ -64,7 +67,10 @@ import org.springframework.transaction.annotation.Propagation;
|
||||||
import org.springframework.transaction.annotation.Transactional;
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.Collections;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
import java.util.HashSet;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
|
@ -85,6 +91,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
|
|
||||||
private final IBatch2JobInstanceRepository myJobInstanceRepository;
|
private final IBatch2JobInstanceRepository myJobInstanceRepository;
|
||||||
private final IBatch2WorkChunkRepository myWorkChunkRepository;
|
private final IBatch2WorkChunkRepository myWorkChunkRepository;
|
||||||
|
private final IBatch2WorkChunkMetadataViewRepository myWorkChunkMetadataViewRepo;
|
||||||
private final EntityManager myEntityManager;
|
private final EntityManager myEntityManager;
|
||||||
private final IHapiTransactionService myTransactionService;
|
private final IHapiTransactionService myTransactionService;
|
||||||
private final IInterceptorBroadcaster myInterceptorBroadcaster;
|
private final IInterceptorBroadcaster myInterceptorBroadcaster;
|
||||||
|
@ -95,13 +102,15 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
public JpaJobPersistenceImpl(
|
public JpaJobPersistenceImpl(
|
||||||
IBatch2JobInstanceRepository theJobInstanceRepository,
|
IBatch2JobInstanceRepository theJobInstanceRepository,
|
||||||
IBatch2WorkChunkRepository theWorkChunkRepository,
|
IBatch2WorkChunkRepository theWorkChunkRepository,
|
||||||
|
IBatch2WorkChunkMetadataViewRepository theWorkChunkMetadataViewRepo,
|
||||||
IHapiTransactionService theTransactionService,
|
IHapiTransactionService theTransactionService,
|
||||||
EntityManager theEntityManager,
|
EntityManager theEntityManager,
|
||||||
IInterceptorBroadcaster theInterceptorBroadcaster) {
|
IInterceptorBroadcaster theInterceptorBroadcaster) {
|
||||||
Validate.notNull(theJobInstanceRepository);
|
Validate.notNull(theJobInstanceRepository, "theJobInstanceRepository");
|
||||||
Validate.notNull(theWorkChunkRepository);
|
Validate.notNull(theWorkChunkRepository, "theWorkChunkRepository");
|
||||||
myJobInstanceRepository = theJobInstanceRepository;
|
myJobInstanceRepository = theJobInstanceRepository;
|
||||||
myWorkChunkRepository = theWorkChunkRepository;
|
myWorkChunkRepository = theWorkChunkRepository;
|
||||||
|
myWorkChunkMetadataViewRepo = theWorkChunkMetadataViewRepo;
|
||||||
myTransactionService = theTransactionService;
|
myTransactionService = theTransactionService;
|
||||||
myEntityManager = theEntityManager;
|
myEntityManager = theEntityManager;
|
||||||
myInterceptorBroadcaster = theInterceptorBroadcaster;
|
myInterceptorBroadcaster = theInterceptorBroadcaster;
|
||||||
|
@ -120,23 +129,46 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
entity.setSerializedData(theBatchWorkChunk.serializedData);
|
entity.setSerializedData(theBatchWorkChunk.serializedData);
|
||||||
entity.setCreateTime(new Date());
|
entity.setCreateTime(new Date());
|
||||||
entity.setStartTime(new Date());
|
entity.setStartTime(new Date());
|
||||||
entity.setStatus(WorkChunkStatusEnum.QUEUED);
|
entity.setStatus(getOnCreateStatus(theBatchWorkChunk));
|
||||||
|
|
||||||
ourLog.debug("Create work chunk {}/{}/{}", entity.getInstanceId(), entity.getId(), entity.getTargetStepId());
|
ourLog.debug("Create work chunk {}/{}/{}", entity.getInstanceId(), entity.getId(), entity.getTargetStepId());
|
||||||
ourLog.trace(
|
ourLog.trace(
|
||||||
"Create work chunk data {}/{}: {}", entity.getInstanceId(), entity.getId(), entity.getSerializedData());
|
"Create work chunk data {}/{}: {}", entity.getInstanceId(), entity.getId(), entity.getSerializedData());
|
||||||
myTransactionService.withSystemRequestOnDefaultPartition().execute(() -> myWorkChunkRepository.save(entity));
|
myTransactionService.withSystemRequestOnDefaultPartition().execute(() -> myWorkChunkRepository.save(entity));
|
||||||
|
|
||||||
return entity.getId();
|
return entity.getId();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the initial onCreate state for the given work chunk.
|
||||||
|
* Gated job chunks start in GATE_WAITING; they will be transitioned to READY during the maintenance pass when all
|
||||||
|
* chunks in the previous step are COMPLETED.
|
||||||
|
* Non-gated job chunks start in READY.
|
||||||
|
*/
|
||||||
|
private static WorkChunkStatusEnum getOnCreateStatus(WorkChunkCreateEvent theBatchWorkChunk) {
|
||||||
|
if (theBatchWorkChunk.isGatedExecution) {
|
||||||
|
return WorkChunkStatusEnum.GATE_WAITING;
|
||||||
|
} else {
|
||||||
|
return WorkChunkStatusEnum.READY;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Transactional(propagation = Propagation.REQUIRED)
|
@Transactional(propagation = Propagation.REQUIRED)
|
||||||
public Optional<WorkChunk> onWorkChunkDequeue(String theChunkId) {
|
public Optional<WorkChunk> onWorkChunkDequeue(String theChunkId) {
|
||||||
|
// take a lock on the chunk id to ensure that the maintenance run isn't doing anything.
|
||||||
|
Batch2WorkChunkEntity chunkLock =
|
||||||
|
myEntityManager.find(Batch2WorkChunkEntity.class, theChunkId, LockModeType.PESSIMISTIC_WRITE);
|
||||||
|
// remove from the current state to avoid stale data.
|
||||||
|
myEntityManager.detach(chunkLock);
|
||||||
|
|
||||||
// NOTE: Ideally, IN_PROGRESS wouldn't be allowed here. On chunk failure, we probably shouldn't be allowed.
|
// NOTE: Ideally, IN_PROGRESS wouldn't be allowed here. On chunk failure, we probably shouldn't be allowed.
|
||||||
// But how does re-run happen if k8s kills a processor mid run?
|
// But how does re-run happen if k8s kills a processor mid run?
|
||||||
List<WorkChunkStatusEnum> priorStates =
|
List<WorkChunkStatusEnum> priorStates =
|
||||||
List.of(WorkChunkStatusEnum.QUEUED, WorkChunkStatusEnum.ERRORED, WorkChunkStatusEnum.IN_PROGRESS);
|
List.of(WorkChunkStatusEnum.QUEUED, WorkChunkStatusEnum.ERRORED, WorkChunkStatusEnum.IN_PROGRESS);
|
||||||
int rowsModified = myWorkChunkRepository.updateChunkStatusForStart(
|
int rowsModified = myWorkChunkRepository.updateChunkStatusForStart(
|
||||||
theChunkId, new Date(), WorkChunkStatusEnum.IN_PROGRESS, priorStates);
|
theChunkId, new Date(), WorkChunkStatusEnum.IN_PROGRESS, priorStates);
|
||||||
|
|
||||||
if (rowsModified == 0) {
|
if (rowsModified == 0) {
|
||||||
ourLog.info("Attempting to start chunk {} but it was already started.", theChunkId);
|
ourLog.info("Attempting to start chunk {} but it was already started.", theChunkId);
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
|
@ -288,6 +320,22 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void enqueueWorkChunkForProcessing(String theChunkId, Consumer<Integer> theCallback) {
|
||||||
|
int updated = myWorkChunkRepository.updateChunkStatus(
|
||||||
|
theChunkId, WorkChunkStatusEnum.READY, WorkChunkStatusEnum.QUEUED);
|
||||||
|
theCallback.accept(updated);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int updatePollWaitingChunksForJobIfReady(String theInstanceId) {
|
||||||
|
return myWorkChunkRepository.updateWorkChunksForPollWaiting(
|
||||||
|
theInstanceId,
|
||||||
|
Date.from(Instant.now()),
|
||||||
|
Set.of(WorkChunkStatusEnum.POLL_WAITING),
|
||||||
|
WorkChunkStatusEnum.READY);
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
||||||
public List<JobInstance> fetchRecentInstances(int thePageSize, int thePageIndex) {
|
public List<JobInstance> fetchRecentInstances(int thePageSize, int thePageIndex) {
|
||||||
|
@ -333,6 +381,16 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void onWorkChunkPollDelay(String theChunkId, Date theDeadline) {
|
||||||
|
int updated = myWorkChunkRepository.updateWorkChunkNextPollTime(
|
||||||
|
theChunkId, WorkChunkStatusEnum.POLL_WAITING, Set.of(WorkChunkStatusEnum.IN_PROGRESS), theDeadline);
|
||||||
|
|
||||||
|
if (updated != 1) {
|
||||||
|
ourLog.warn("Expected to update 1 work chunk's poll delay; but found {}", updated);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onWorkChunkFailed(String theChunkId, String theErrorMessage) {
|
public void onWorkChunkFailed(String theChunkId, String theErrorMessage) {
|
||||||
ourLog.info("Marking chunk {} as failed with message: {}", theChunkId, theErrorMessage);
|
ourLog.info("Marking chunk {} as failed with message: {}", theChunkId, theErrorMessage);
|
||||||
|
@ -383,24 +441,23 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
public Set<WorkChunkStatusEnum> getDistinctWorkChunkStatesForJobAndStep(
|
||||||
public boolean canAdvanceInstanceToNextStep(String theInstanceId, String theCurrentStepId) {
|
String theInstanceId, String theCurrentStepId) {
|
||||||
|
if (getRunningJob(theInstanceId) == null) {
|
||||||
|
return Collections.unmodifiableSet(new HashSet<>());
|
||||||
|
}
|
||||||
|
return myWorkChunkRepository.getDistinctStatusesForStep(theInstanceId, theCurrentStepId);
|
||||||
|
}
|
||||||
|
|
||||||
|
private Batch2JobInstanceEntity getRunningJob(String theInstanceId) {
|
||||||
Optional<Batch2JobInstanceEntity> instance = myJobInstanceRepository.findById(theInstanceId);
|
Optional<Batch2JobInstanceEntity> instance = myJobInstanceRepository.findById(theInstanceId);
|
||||||
if (instance.isEmpty()) {
|
if (instance.isEmpty()) {
|
||||||
return false;
|
return null;
|
||||||
}
|
}
|
||||||
if (instance.get().getStatus().isEnded()) {
|
if (instance.get().getStatus().isEnded()) {
|
||||||
return false;
|
return null;
|
||||||
}
|
}
|
||||||
Set<WorkChunkStatusEnum> statusesForStep =
|
return instance.get();
|
||||||
myWorkChunkRepository.getDistinctStatusesForStep(theInstanceId, theCurrentStepId);
|
|
||||||
|
|
||||||
ourLog.debug(
|
|
||||||
"Checking whether gated job can advanced to next step. [instanceId={}, currentStepId={}, statusesForStep={}]",
|
|
||||||
theInstanceId,
|
|
||||||
theCurrentStepId,
|
|
||||||
statusesForStep);
|
|
||||||
return statusesForStep.isEmpty() || statusesForStep.equals(Set.of(WorkChunkStatusEnum.COMPLETED));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchChunks(
|
private void fetchChunks(
|
||||||
|
@ -428,18 +485,16 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<String> fetchAllChunkIdsForStepWithStatus(
|
public void updateInstanceUpdateTime(String theInstanceId) {
|
||||||
String theInstanceId, String theStepId, WorkChunkStatusEnum theStatusEnum) {
|
myJobInstanceRepository.updateInstanceUpdateTime(theInstanceId, new Date());
|
||||||
return myTransactionService
|
|
||||||
.withSystemRequest()
|
|
||||||
.withPropagation(Propagation.REQUIRES_NEW)
|
|
||||||
.execute(() -> myWorkChunkRepository.fetchAllChunkIdsForStepWithStatus(
|
|
||||||
theInstanceId, theStepId, theStatusEnum));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void updateInstanceUpdateTime(String theInstanceId) {
|
public WorkChunk createWorkChunk(WorkChunk theWorkChunk) {
|
||||||
myJobInstanceRepository.updateInstanceUpdateTime(theInstanceId, new Date());
|
if (theWorkChunk.getId() == null) {
|
||||||
|
theWorkChunk.setId(UUID.randomUUID().toString());
|
||||||
|
}
|
||||||
|
return toChunk(myWorkChunkRepository.save(Batch2WorkChunkEntity.fromWorkChunk(theWorkChunk)));
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -458,6 +513,15 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
.map(this::toChunk);
|
.map(this::toChunk);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Page<WorkChunkMetadata> fetchAllWorkChunkMetadataForJobInStates(
|
||||||
|
Pageable thePageable, String theInstanceId, Set<WorkChunkStatusEnum> theStates) {
|
||||||
|
Page<Batch2WorkChunkMetadataView> page =
|
||||||
|
myWorkChunkMetadataViewRepo.fetchWorkChunkMetadataForJobInStates(thePageable, theInstanceId, theStates);
|
||||||
|
|
||||||
|
return page.map(Batch2WorkChunkMetadataView::toChunkMetadata);
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean updateInstance(String theInstanceId, JobInstanceUpdateCallback theModifier) {
|
public boolean updateInstance(String theInstanceId, JobInstanceUpdateCallback theModifier) {
|
||||||
Batch2JobInstanceEntity instanceEntity =
|
Batch2JobInstanceEntity instanceEntity =
|
||||||
|
@ -542,4 +606,45 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_BATCH_JOB_CREATE, params);
|
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_BATCH_JOB_CREATE, params);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
||||||
|
public boolean advanceJobStepAndUpdateChunkStatus(
|
||||||
|
String theJobInstanceId, String theNextStepId, boolean theIsReductionStep) {
|
||||||
|
boolean changed = updateInstance(theJobInstanceId, instance -> {
|
||||||
|
if (instance.getCurrentGatedStepId().equals(theNextStepId)) {
|
||||||
|
// someone else beat us here. No changes
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
ourLog.debug("Moving gated instance {} to the next step {}.", theJobInstanceId, theNextStepId);
|
||||||
|
instance.setCurrentGatedStepId(theNextStepId);
|
||||||
|
return true;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (changed) {
|
||||||
|
ourLog.debug(
|
||||||
|
"Updating chunk status from GATE_WAITING to READY for gated instance {} in step {}.",
|
||||||
|
theJobInstanceId,
|
||||||
|
theNextStepId);
|
||||||
|
WorkChunkStatusEnum nextStep =
|
||||||
|
theIsReductionStep ? WorkChunkStatusEnum.REDUCTION_READY : WorkChunkStatusEnum.READY;
|
||||||
|
// when we reach here, the current step id is equal to theNextStepId
|
||||||
|
// Up to 7.1, gated jobs' work chunks are created in status QUEUED but not actually queued for the
|
||||||
|
// workers.
|
||||||
|
// In order to keep them compatible, turn QUEUED chunks into READY, too.
|
||||||
|
// TODO: 'QUEUED' from the IN clause will be removed after 7.6.0.
|
||||||
|
int numChanged = myWorkChunkRepository.updateAllChunksForStepWithStatus(
|
||||||
|
theJobInstanceId,
|
||||||
|
theNextStepId,
|
||||||
|
List.of(WorkChunkStatusEnum.GATE_WAITING, WorkChunkStatusEnum.QUEUED),
|
||||||
|
nextStep);
|
||||||
|
ourLog.debug(
|
||||||
|
"Updated {} chunks of gated instance {} for step {} from fake QUEUED to READY.",
|
||||||
|
numChanged,
|
||||||
|
theJobInstanceId,
|
||||||
|
theNextStepId);
|
||||||
|
}
|
||||||
|
|
||||||
|
return changed;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao;
|
||||||
import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
|
import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||||
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import com.google.common.hash.HashingInputStream;
|
import com.google.common.hash.HashingInputStream;
|
||||||
import com.google.common.io.ByteStreams;
|
import com.google.common.io.ByteStreams;
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
|
@ -59,6 +60,8 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp
|
||||||
@Autowired
|
@Autowired
|
||||||
private IBinaryStorageEntityDao myBinaryStorageEntityDao;
|
private IBinaryStorageEntityDao myBinaryStorageEntityDao;
|
||||||
|
|
||||||
|
private boolean mySupportLegacyLobServer = false;
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
@Override
|
@Override
|
||||||
@Transactional(propagation = Propagation.REQUIRED)
|
@Transactional(propagation = Propagation.REQUIRED)
|
||||||
|
@ -96,9 +99,10 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp
|
||||||
entity.setContentId(id);
|
entity.setContentId(id);
|
||||||
entity.setStorageContentBin(loadedStream);
|
entity.setStorageContentBin(loadedStream);
|
||||||
|
|
||||||
// TODO: remove writing Blob in a future release
|
if (mySupportLegacyLobServer) {
|
||||||
Blob dataBlob = lobHelper.createBlob(loadedStream);
|
Blob dataBlob = lobHelper.createBlob(loadedStream);
|
||||||
entity.setBlob(dataBlob);
|
entity.setBlob(dataBlob);
|
||||||
|
}
|
||||||
|
|
||||||
// Update the entity with the final byte count and hash
|
// Update the entity with the final byte count and hash
|
||||||
long bytes = countingInputStream.getByteCount();
|
long bytes = countingInputStream.getByteCount();
|
||||||
|
@ -169,6 +173,11 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp
|
||||||
return copyBinaryContentToByteArray(entityOpt);
|
return copyBinaryContentToByteArray(entityOpt);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public DatabaseBinaryContentStorageSvcImpl setSupportLegacyLobServer(boolean theSupportLegacyLobServer) {
|
||||||
|
mySupportLegacyLobServer = theSupportLegacyLobServer;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity)
|
void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
|
||||||
|
@ -212,4 +221,10 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp
|
||||||
|
|
||||||
return retVal;
|
return retVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
public DatabaseBinaryContentStorageSvcImpl setEntityManagerForTesting(EntityManager theEntityManager) {
|
||||||
|
myEntityManager = theEntityManager;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -73,6 +73,7 @@ import ca.uhn.fhir.jpa.delete.DeleteConflictFinderService;
|
||||||
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
|
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
|
||||||
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
|
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
|
||||||
import ca.uhn.fhir.jpa.entity.Search;
|
import ca.uhn.fhir.jpa.entity.Search;
|
||||||
|
import ca.uhn.fhir.jpa.entity.TermValueSet;
|
||||||
import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
|
import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
|
||||||
import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices;
|
import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices;
|
||||||
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
|
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
|
||||||
|
@ -154,6 +155,8 @@ import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
|
||||||
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
||||||
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
|
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
|
||||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||||
|
import ca.uhn.fhir.jpa.term.ValueSetConceptAccumulator;
|
||||||
|
import ca.uhn.fhir.jpa.term.ValueSetConceptAccumulatorFactory;
|
||||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||||
|
@ -822,6 +825,17 @@ public class JpaConfig {
|
||||||
return new TermReadSvcImpl();
|
return new TermReadSvcImpl();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ValueSetConceptAccumulatorFactory valueSetConceptAccumulatorFactory() {
|
||||||
|
return new ValueSetConceptAccumulatorFactory();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Scope("prototype")
|
||||||
|
public ValueSetConceptAccumulator valueSetConceptAccumulator(TermValueSet theTermValueSet) {
|
||||||
|
return valueSetConceptAccumulatorFactory().create(theTermValueSet);
|
||||||
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public ITermCodeSystemStorageSvc termCodeSystemStorageSvc() {
|
public ITermCodeSystemStorageSvc termCodeSystemStorageSvc() {
|
||||||
return new TermCodeSystemStorageSvcImpl();
|
return new TermCodeSystemStorageSvcImpl();
|
||||||
|
|
|
@ -1410,8 +1410,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Transactional
|
|
||||||
public <MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, IIdType theId, RequestDetails theRequest) {
|
public <MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, IIdType theId, RequestDetails theRequest) {
|
||||||
|
return myTransactionService.withRequest(theRequest).execute(() -> {
|
||||||
Set<TagDefinition> tagDefs = new HashSet<>();
|
Set<TagDefinition> tagDefs = new HashSet<>();
|
||||||
BaseHasResource entity = readEntity(theId, theRequest);
|
BaseHasResource entity = readEntity(theId, theRequest);
|
||||||
for (BaseTag next : entity.getTags()) {
|
for (BaseTag next : entity.getTags()) {
|
||||||
|
@ -1423,6 +1423,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
retVal.setVersionId(Long.toString(entity.getVersion()));
|
retVal.setVersionId(Long.toString(entity.getVersion()));
|
||||||
|
|
||||||
return retVal;
|
return retVal;
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -0,0 +1,21 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.data;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
|
||||||
|
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkMetadataView;
|
||||||
|
import org.springframework.data.domain.Page;
|
||||||
|
import org.springframework.data.domain.Pageable;
|
||||||
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
|
import org.springframework.data.jpa.repository.Query;
|
||||||
|
import org.springframework.data.repository.query.Param;
|
||||||
|
|
||||||
|
import java.util.Collection;
|
||||||
|
|
||||||
|
public interface IBatch2WorkChunkMetadataViewRepository extends JpaRepository<Batch2WorkChunkMetadataView, String> {
|
||||||
|
|
||||||
|
@Query("SELECT v FROM Batch2WorkChunkMetadataView v WHERE v.myInstanceId = :instanceId AND v.myStatus IN :states "
|
||||||
|
+ " ORDER BY v.myInstanceId, v.myTargetStepId, v.myStatus, v.mySequence, v.myId ASC")
|
||||||
|
Page<Batch2WorkChunkMetadataView> fetchWorkChunkMetadataForJobInStates(
|
||||||
|
Pageable thePageRequest,
|
||||||
|
@Param("instanceId") String theInstanceId,
|
||||||
|
@Param("states") Collection<WorkChunkStatusEnum> theStates);
|
||||||
|
}
|
|
@ -49,7 +49,8 @@ public interface IBatch2WorkChunkRepository
|
||||||
@Query("SELECT new Batch2WorkChunkEntity("
|
@Query("SELECT new Batch2WorkChunkEntity("
|
||||||
+ "e.myId, e.mySequence, e.myJobDefinitionId, e.myJobDefinitionVersion, e.myInstanceId, e.myTargetStepId, e.myStatus,"
|
+ "e.myId, e.mySequence, e.myJobDefinitionId, e.myJobDefinitionVersion, e.myInstanceId, e.myTargetStepId, e.myStatus,"
|
||||||
+ "e.myCreateTime, e.myStartTime, e.myUpdateTime, e.myEndTime,"
|
+ "e.myCreateTime, e.myStartTime, e.myUpdateTime, e.myEndTime,"
|
||||||
+ "e.myErrorMessage, e.myErrorCount, e.myRecordsProcessed, e.myWarningMessage"
|
+ "e.myErrorMessage, e.myErrorCount, e.myRecordsProcessed, e.myWarningMessage,"
|
||||||
|
+ "e.myNextPollTime, e.myPollAttempts"
|
||||||
+ ") FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC")
|
+ ") FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC")
|
||||||
List<Batch2WorkChunkEntity> fetchChunksNoData(Pageable thePageRequest, @Param("instanceId") String theInstanceId);
|
List<Batch2WorkChunkEntity> fetchChunksNoData(Pageable thePageRequest, @Param("instanceId") String theInstanceId);
|
||||||
|
|
||||||
|
@ -75,6 +76,24 @@ public interface IBatch2WorkChunkRepository
 @Param("status") WorkChunkStatusEnum theInProgress,
 @Param("warningMessage") String theWarningMessage);

+@Modifying
+@Query(
+"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myNextPollTime = :nextPollTime, e.myPollAttempts = e.myPollAttempts + 1 WHERE e.myId = :id AND e.myStatus IN(:states)")
+int updateWorkChunkNextPollTime(
+@Param("id") String theChunkId,
+@Param("status") WorkChunkStatusEnum theStatus,
+@Param("states") Set<WorkChunkStatusEnum> theInitialStates,
+@Param("nextPollTime") Date theNextPollTime);
+
+@Modifying
+@Query(
+"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myNextPollTime = null WHERE e.myInstanceId = :instanceId AND e.myStatus IN(:states) AND e.myNextPollTime <= :pollTime")
+int updateWorkChunksForPollWaiting(
+@Param("instanceId") String theInstanceId,
+@Param("pollTime") Date theTime,
+@Param("states") Set<WorkChunkStatusEnum> theInitialStates,
+@Param("status") WorkChunkStatusEnum theNewStatus);
+
 @Modifying
 @Query(
 "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.mySerializedDataVc = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)")
@ -102,6 +121,22 @@ public interface IBatch2WorkChunkRepository
 @Param("status") WorkChunkStatusEnum theInProgress,
 @Param("startStatuses") Collection<WorkChunkStatusEnum> theStartStatuses);

+@Modifying
+@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :newStatus WHERE e.myId = :id AND e.myStatus = :oldStatus")
+int updateChunkStatus(
+@Param("id") String theChunkId,
+@Param("oldStatus") WorkChunkStatusEnum theOldStatus,
+@Param("newStatus") WorkChunkStatusEnum theNewStatus);
+
+@Modifying
+@Query(
+"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :newStatus WHERE e.myInstanceId = :instanceId AND e.myTargetStepId = :stepId AND e.myStatus IN ( :oldStatuses )")
+int updateAllChunksForStepWithStatus(
+@Param("instanceId") String theInstanceId,
+@Param("stepId") String theStepId,
+@Param("oldStatuses") List<WorkChunkStatusEnum> theOldStatuses,
+@Param("newStatus") WorkChunkStatusEnum theNewStatus);
+
 @Modifying
 @Query("DELETE FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId")
 int deleteAllForInstance(@Param("instanceId") String theInstanceId);
@ -298,7 +298,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
 }
 }

-private void expungeCurrentVersionOfResource(
+protected void expungeCurrentVersionOfResource(
 RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) {
 ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new);

@ -311,8 +311,6 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
 ourLog.info(
 "Expunging current version of resource {}", resource.getIdDt().getValue());

-deleteAllSearchParams(JpaPid.fromId(resource.getResourceId()));
-
 try {
 if (resource.isHasTags()) {
 myResourceTagDao.deleteByResourceId(resource.getId());
@ -19,6 +19,7 @@
 */
 package ca.uhn.fhir.jpa.entity;

+import ca.uhn.fhir.batch2.model.WorkChunk;
 import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
 import jakarta.persistence.Basic;
 import jakarta.persistence.Column;
@ -50,7 +51,10 @@ import static org.apache.commons.lang3.StringUtils.left;
 @Entity
 @Table(
 name = "BT2_WORK_CHUNK",
-indexes = {@Index(name = "IDX_BT2WC_II_SEQ", columnList = "INSTANCE_ID,SEQ")})
+indexes = {
+@Index(name = "IDX_BT2WC_II_SEQ", columnList = "INSTANCE_ID,SEQ"),
+@Index(name = "IDX_BT2WC_II_SI_S_SEQ_ID", columnList = "INSTANCE_ID,TGT_STEP_ID,STAT,SEQ,ID")
+})
 public class Batch2WorkChunkEntity implements Serializable {

 public static final int ERROR_MSG_MAX_LENGTH = 500;
@ -125,6 +129,19 @@ public class Batch2WorkChunkEntity implements Serializable {
 @Column(name = "WARNING_MSG", length = WARNING_MSG_MAX_LENGTH, nullable = true)
 private String myWarningMessage;

+/**
+ * The next time the work chunk can attempt to rerun its work step.
+ */
+@Column(name = "NEXT_POLL_TIME", nullable = true)
+@Temporal(TemporalType.TIMESTAMP)
+private Date myNextPollTime;
+
+/**
+ * The number of times the work chunk has had its state set back to POLL_WAITING.
+ */
+@Column(name = "POLL_ATTEMPTS", nullable = true)
+private int myPollAttempts;
+
 /**
 * Default constructor for Hibernate.
 */
@ -148,7 +165,9 @@ public class Batch2WorkChunkEntity implements Serializable {
 String theErrorMessage,
 int theErrorCount,
 Integer theRecordsProcessed,
-String theWarningMessage) {
+String theWarningMessage,
+Date theNextPollTime,
+Integer thePollAttempts) {
 myId = theId;
 mySequence = theSequence;
 myJobDefinitionId = theJobDefinitionId;
@ -164,6 +183,32 @@ public class Batch2WorkChunkEntity implements Serializable {
 myErrorCount = theErrorCount;
 myRecordsProcessed = theRecordsProcessed;
 myWarningMessage = theWarningMessage;
+myNextPollTime = theNextPollTime;
+myPollAttempts = thePollAttempts;
+}
+
+public static Batch2WorkChunkEntity fromWorkChunk(WorkChunk theWorkChunk) {
+Batch2WorkChunkEntity entity = new Batch2WorkChunkEntity(
+theWorkChunk.getId(),
+theWorkChunk.getSequence(),
+theWorkChunk.getJobDefinitionId(),
+theWorkChunk.getJobDefinitionVersion(),
+theWorkChunk.getInstanceId(),
+theWorkChunk.getTargetStepId(),
+theWorkChunk.getStatus(),
+theWorkChunk.getCreateTime(),
+theWorkChunk.getStartTime(),
+theWorkChunk.getUpdateTime(),
+theWorkChunk.getEndTime(),
+theWorkChunk.getErrorMessage(),
+theWorkChunk.getErrorCount(),
+theWorkChunk.getRecordsProcessed(),
+theWorkChunk.getWarningMessage(),
+theWorkChunk.getNextPollTime(),
+theWorkChunk.getPollAttempts());
+entity.setSerializedData(theWorkChunk.getData());
+
+return entity;
 }

 public int getErrorCount() {
@ -299,6 +344,22 @@ public class Batch2WorkChunkEntity implements Serializable {
 myInstanceId = theInstanceId;
 }

+public Date getNextPollTime() {
+return myNextPollTime;
+}
+
+public void setNextPollTime(Date theNextPollTime) {
+myNextPollTime = theNextPollTime;
+}
+
+public int getPollAttempts() {
+return myPollAttempts;
+}
+
+public void setPollAttempts(int thePollAttempts) {
+myPollAttempts = thePollAttempts;
+}
+
 @Override
 public String toString() {
 return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
@ -318,6 +379,8 @@ public class Batch2WorkChunkEntity implements Serializable {
 .append("status", myStatus)
 .append("errorMessage", myErrorMessage)
 .append("warningMessage", myWarningMessage)
+.append("nextPollTime", myNextPollTime)
+.append("pollAttempts", myPollAttempts)
 .toString();
 }
 }
@ -0,0 +1,123 @@
+package ca.uhn.fhir.jpa.entity;
+
+import ca.uhn.fhir.batch2.model.WorkChunkMetadata;
+import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.Id;
+import org.hibernate.annotations.Immutable;
+import org.hibernate.annotations.Subselect;
+
+import java.io.Serializable;
+
+import static ca.uhn.fhir.batch2.model.JobDefinition.ID_MAX_LENGTH;
+
+/**
+ * A view for a Work Chunk that contains only the most necessary information
+ * to satisfy the no-data path.
+ */
+@Entity
+@Immutable
+@Subselect("SELECT e.id as id, "
++ " e.seq as seq,"
++ " e.stat as state, "
++ " e.instance_id as instance_id, "
++ " e.definition_id as job_definition_id, "
++ " e.definition_ver as job_definition_version, "
++ " e.tgt_step_id as target_step_id "
++ "FROM BT2_WORK_CHUNK e")
+public class Batch2WorkChunkMetadataView implements Serializable {
+
+@Id
+@Column(name = "ID", length = ID_MAX_LENGTH)
+private String myId;
+
+@Column(name = "SEQ", nullable = false)
+private int mySequence;
+
+@Column(name = "STATE", length = ID_MAX_LENGTH, nullable = false)
+@Enumerated(EnumType.STRING)
+private WorkChunkStatusEnum myStatus;
+
+@Column(name = "INSTANCE_ID", length = ID_MAX_LENGTH, nullable = false)
+private String myInstanceId;
+
+@Column(name = "JOB_DEFINITION_ID", length = ID_MAX_LENGTH, nullable = false)
+private String myJobDefinitionId;
+
+@Column(name = "JOB_DEFINITION_VERSION", nullable = false)
+private int myJobDefinitionVersion;
+
+@Column(name = "TARGET_STEP_ID", length = ID_MAX_LENGTH, nullable = false)
+private String myTargetStepId;
+
+public String getId() {
+return myId;
+}
+
+public void setId(String theId) {
+myId = theId;
+}
+
+public int getSequence() {
+return mySequence;
+}
+
+public void setSequence(int theSequence) {
+mySequence = theSequence;
+}
+
+public WorkChunkStatusEnum getStatus() {
+return myStatus;
+}
+
+public void setStatus(WorkChunkStatusEnum theStatus) {
+myStatus = theStatus;
+}
+
+public String getInstanceId() {
+return myInstanceId;
+}
+
+public void setInstanceId(String theInstanceId) {
+myInstanceId = theInstanceId;
+}
+
+public String getJobDefinitionId() {
+return myJobDefinitionId;
+}
+
+public void setJobDefinitionId(String theJobDefinitionId) {
+myJobDefinitionId = theJobDefinitionId;
+}
+
+public int getJobDefinitionVersion() {
+return myJobDefinitionVersion;
+}
+
+public void setJobDefinitionVersion(int theJobDefinitionVersion) {
+myJobDefinitionVersion = theJobDefinitionVersion;
+}
+
+public String getTargetStepId() {
+return myTargetStepId;
+}
+
+public void setTargetStepId(String theTargetStepId) {
+myTargetStepId = theTargetStepId;
+}
+
+public WorkChunkMetadata toChunkMetadata() {
+WorkChunkMetadata metadata = new WorkChunkMetadata();
+metadata.setId(getId());
+metadata.setInstanceId(getInstanceId());
+metadata.setSequence(getSequence());
+metadata.setStatus(getStatus());
+metadata.setJobDefinitionId(getJobDefinitionId());
+metadata.setJobDefinitionVersion(getJobDefinitionVersion());
+metadata.setTargetStepId(getTargetStepId());
+return metadata;
+}
+}
@ -24,6 +24,7 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.search.DeferConceptIndexingRoutingBinder;
 import ca.uhn.fhir.util.ValidateUtil;
+import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.Nonnull;
 import jakarta.persistence.Column;
 import jakarta.persistence.Entity;
@ -58,10 +59,7 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderR
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
-import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
-import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
-import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
 import org.hl7.fhir.r4.model.Coding;

 import java.io.Serializable;
@ -177,6 +175,11 @@ public class TermConcept implements Serializable {
 @Column(name = "PARENT_PIDS", nullable = true)
 private String myParentPids;

+@FullTextField(
+name = "myParentPids",
+searchable = Searchable.YES,
+projectable = Projectable.YES,
+analyzer = "conceptParentPidsAnalyzer")
 @Column(name = "PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
 private String myParentPidsVc;

@ -189,6 +192,9 @@ public class TermConcept implements Serializable {
 @Column(name = "CODE_SEQUENCE", nullable = true)
 private Integer mySequence;

+@Transient
+private boolean mySupportLegacyLob = false;
+
 public TermConcept() {
 super();
 }
@ -362,13 +368,6 @@ public class TermConcept implements Serializable {
 return this;
 }

-@Transient
-@FullTextField(
-name = "myParentPids",
-searchable = Searchable.YES,
-projectable = Projectable.YES,
-analyzer = "conceptParentPidsAnalyzer")
-@IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")}))
 public String getParentPidsAsString() {
 return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids;
 }
@ -458,6 +457,10 @@ public class TermConcept implements Serializable {

 ourLog.trace("Code {}/{} has parents {}", entity.getId(), entity.getCode(), entity.getParentPidsAsString());
 }
+
+if (!mySupportLegacyLob) {
+clearParentPidsLob();
+}
 }

 private void setParentPids(Set<Long> theParentPids) {
@ -519,4 +522,17 @@ public class TermConcept implements Serializable {
 public List<TermConcept> getChildCodes() {
 return getChildren().stream().map(TermConceptParentChildLink::getChild).collect(Collectors.toList());
 }
+
+public void flagForLegacyLobSupport(boolean theSupportLegacyLob) {
+mySupportLegacyLob = theSupportLegacyLob;
+}
+
+private void clearParentPidsLob() {
+myParentPids = null;
+}
+
+@VisibleForTesting
+public boolean hasParentPidsLobForTesting() {
+return nonNull(myParentPids);
+}
 }
@ -54,6 +54,7 @@ import org.hibernate.validator.constraints.NotBlank;
 import java.io.Serializable;
 import java.nio.charset.StandardCharsets;

+import static java.util.Objects.nonNull;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;

@ -307,9 +308,15 @@ public class TermConceptProperty implements Serializable {
 return myId;
 }

+public void performLegacyLobSupport(boolean theSupportLegacyLob) {
+if (!theSupportLegacyLob) {
+myValueLob = null;
+}
+}
+
 @VisibleForTesting
-public byte[] getValueBlobForTesting() {
+public boolean hasValueBlobForTesting() {
-return myValueLob;
+return nonNull(myValueLob);
 }

 @VisibleForTesting
@ -318,8 +325,8 @@ public class TermConceptProperty implements Serializable {
 }

 @VisibleForTesting
-public byte[] getValueBinForTesting() {
+public boolean hasValueBinForTesting() {
-return myValueBin;
+return nonNull(myValueBin);
 }

 @VisibleForTesting
@ -20,6 +20,7 @@
 package ca.uhn.fhir.jpa.entity;

 import ca.uhn.fhir.util.ValidateUtil;
+import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.Nonnull;
 import jakarta.persistence.Column;
 import jakarta.persistence.Entity;
@ -46,6 +47,7 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;

+import static java.util.Objects.nonNull;
 import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
@ -296,4 +298,13 @@ public class TermValueSetConcept implements Serializable {
 ? mySourceConceptDirectParentPidsVc
 : mySourceConceptDirectParentPids;
 }
+
+public void clearSourceConceptDirectParentPidsLob() {
+mySourceConceptDirectParentPids = null;
+}
+
+@VisibleForTesting
+public boolean hasSourceConceptDirectParentPidsLob() {
+return nonNull(mySourceConceptDirectParentPids);
+}
 }
@ -51,6 +51,7 @@ import java.sql.SQLException;
 + " vsc.SYSTEM_VER AS SYSTEM_VER, "
 + " vsc.SOURCE_PID AS SOURCE_PID, "
 + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, "
++ " vsc.SOURCE_DIRECT_PARENT_PIDS_VC AS SOURCE_DIRECT_PARENT_PIDS_VC, "
 + " vscd.PID AS DESIGNATION_PID, "
 + " vscd.LANG AS DESIGNATION_LANG, "
 + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, "
@ -112,6 +113,9 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce
 @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true)
 private Clob mySourceConceptDirectParentPids;

+@Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true)
+private String mySourceConceptDirectParentPidsVc;
+
 @Override
 public Long getSourceConceptPid() {
 return mySourceConceptPid;
@ -119,14 +123,19 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce

 @Override
 public String getSourceConceptDirectParentPids() {
+String retVal = null;
+
 if (mySourceConceptDirectParentPids != null) {
 try (Reader characterStream = mySourceConceptDirectParentPids.getCharacterStream()) {
-return IOUtils.toString(characterStream);
+retVal = IOUtils.toString(characterStream);
 } catch (IOException | SQLException e) {
 throw new InternalErrorException(Msg.code(828) + e);
 }
+} else if (mySourceConceptDirectParentPidsVc != null) {
+retVal = mySourceConceptDirectParentPidsVc;
 }
-return null;
+
+return retVal;
 }

 @Override
@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
 import ca.uhn.fhir.jpa.entity.Search;
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.CalculateOrdinalDatesTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
@ -146,8 +147,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {

 binaryStorageBlobTable
 .renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
+.getLastAddedTask()
+.ifPresent(BaseTask::doNothing);
+binaryStorageBlobTable
 .renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
-.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH");
+.getLastAddedTask()
+.ifPresent(BaseTask::doNothing);
+binaryStorageBlobTable
+.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH")
+.getLastAddedTask()
+.ifPresent(BaseTask::doNothing);

 binaryStorageBlobTable
 .modifyColumn("20240404.4", "BLOB_DATA")
@ -159,9 +168,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 .nullable()
 .type(ColumnTypeEnum.BINARY);

-binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN");
+binaryStorageBlobTable
+.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN")
+.doNothing();

-binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE");
+binaryStorageBlobTable
+.renameTable("20240404.7", "HFJ_BINARY_STORAGE")
+.doNothing();
+
+Builder.BuilderWithTableName binaryStorageTableFix = version.onTable("HFJ_BINARY_STORAGE");
+
+binaryStorageTableFix.renameColumn("20240404.10", "CONTENT_ID", "BLOB_ID", true, true);
+binaryStorageTableFix.renameColumn("20240404.20", "CONTENT_SIZE", "BLOB_SIZE", true, true);
+binaryStorageTableFix.renameColumn("20240404.30", "CONTENT_HASH", "BLOB_HASH", true, true);
+
+binaryStorageTableFix
+.renameTable("20240404.40", "HFJ_BINARY_STORAGE_BLOB")
+.failureAllowed();
 }

 {
@ -172,7 +195,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 .nullable()
 .type(ColumnTypeEnum.BINARY);

-termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN");
+termConceptPropertyTable
+.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN")
+.doNothing();
 }

 {
@ -182,8 +207,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 .nullable()
 .type(ColumnTypeEnum.TEXT);

-termValueSetConceptTable.migrateClobToText(
-"20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC");
+termValueSetConceptTable
+.migrateClobToText("20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC")
+.doNothing();
 }

 {
@ -193,7 +219,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 .nullable()
 .type(ColumnTypeEnum.TEXT);

-termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC");
+termConceptTable
+.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC")
+.doNothing();
 }
 }

@ -293,6 +321,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {

 // This fix will work for MSSQL or Oracle.
 version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231222.1"));
+
+// add index to Batch2WorkChunkEntity
+Builder.BuilderWithTableName workChunkTable = version.onTable("BT2_WORK_CHUNK");
+
+workChunkTable
+.addIndex("20240321.1", "IDX_BT2WC_II_SI_S_SEQ_ID")
+.unique(false)
+.withColumns("INSTANCE_ID", "TGT_STEP_ID", "STAT", "SEQ", "ID");
+
+// add columns to Batch2WorkChunkEntity
+Builder.BuilderWithTableName batch2WorkChunkTable = version.onTable("BT2_WORK_CHUNK");
+
+batch2WorkChunkTable
+.addColumn("20240322.1", "NEXT_POLL_TIME")
+.nullable()
+.type(ColumnTypeEnum.DATE_TIMESTAMP);
+batch2WorkChunkTable.addColumn("20240322.2", "POLL_ATTEMPTS").nullable().type(ColumnTypeEnum.INT);
 }

 private void init680_Part2() {
@ -37,7 +37,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
 IPartitionLookupSvc myPartitionConfigSvc;

 @Override
-protected RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) {
+public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) {
 List<String> names = null;
 for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) {

@ -59,7 +59,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
 }
 }

-if (theRequestPartitionId.getPartitionNames() != null) {
+if (theRequestPartitionId.hasPartitionNames()) {
 if (partition == null) {
 Validate.isTrue(
 theRequestPartitionId.getPartitionIds().get(i) == null,
@ -68,8 +68,8 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
 } else {
 Validate.isTrue(
 Objects.equals(
-theRequestPartitionId.getPartitionIds().get(i), partition.getId()),
-"Partition name %s does not match ID %n",
+theRequestPartitionId.getPartitionNames().get(i), partition.getName()),
+"Partition name %s does not match ID %s",
 theRequestPartitionId.getPartitionNames().get(i),
 theRequestPartitionId.getPartitionIds().get(i));
 }
@ -94,7 +94,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
 }

 @Override
-protected RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) {
+public RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) {
 List<Integer> ids = null;
 for (int i = 0; i < theRequestPartitionId.getPartitionNames().size(); i++) {

@ -122,9 +122,9 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
 Validate.isTrue(
 Objects.equals(
 theRequestPartitionId.getPartitionIds().get(i), partition.getId()),
-"Partition name %s does not match ID %n",
-theRequestPartitionId.getPartitionNames().get(i),
-theRequestPartitionId.getPartitionIds().get(i));
+"Partition ID %s does not match name %s",
+theRequestPartitionId.getPartitionIds().get(i),
+theRequestPartitionId.getPartitionNames().get(i));
 }
 } else {
 if (ids == null) {
@ -84,6 +84,13 @@ public abstract class BaseJpaResourceProviderObservation<T extends IBaseResource
 }
 if (theMax != null) {
 paramMap.setLastNMax(theMax.getValue());
+
+/**
+ * The removal of the original raw parameter is required as every implementing class
+ * has the "Observation" resource class defined. For this resource, the max parameter
+ * is not supported and thus has to be removed before the use of "translateRawParameters".
+ */
+theAdditionalRawParams.remove("max");
 }
 if (theCount != null) {
 paramMap.setCount(theCount.getValue());
@ -353,26 +353,39 @@ public class QueryStack {
 throw new InvalidRequestException(Msg.code(2289) + msg);
 }

-BaseSearchParamPredicateBuilder chainedPredicateBuilder;
-DbColumn[] sortColumn;
+// add a left-outer join to a predicate for the target type, then sort on value columns(s).
 switch (targetSearchParameter.getParamType()) {
 case STRING:
 StringPredicateBuilder stringPredicateBuilder = mySqlBuilder.createStringPredicateBuilder();
-sortColumn = new DbColumn[] {stringPredicateBuilder.getColumnValueNormalized()};
-chainedPredicateBuilder = stringPredicateBuilder;
-break;
+addSortCustomJoin(
+resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+stringPredicateBuilder,
+stringPredicateBuilder.createHashIdentityPredicate(targetType, theChain));
+
+mySqlBuilder.addSortString(
+stringPredicateBuilder.getColumnValueNormalized(), theAscending, myUseAggregate);
+return;

 case TOKEN:
 TokenPredicateBuilder tokenPredicateBuilder = mySqlBuilder.createTokenPredicateBuilder();
-sortColumn =
-new DbColumn[] {tokenPredicateBuilder.getColumnSystem(), tokenPredicateBuilder.getColumnValue()
-};
-chainedPredicateBuilder = tokenPredicateBuilder;
-break;
+addSortCustomJoin(
+resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+tokenPredicateBuilder,
+tokenPredicateBuilder.createHashIdentityPredicate(targetType, theChain));
+
+mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnSystem(), theAscending, myUseAggregate);
+mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnValue(), theAscending, myUseAggregate);
+return;

 case DATE:
 DatePredicateBuilder datePredicateBuilder = mySqlBuilder.createDatePredicateBuilder();
-sortColumn = new DbColumn[] {datePredicateBuilder.getColumnValueLow()};
-chainedPredicateBuilder = datePredicateBuilder;
-break;
+addSortCustomJoin(
+resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+datePredicateBuilder,
+datePredicateBuilder.createHashIdentityPredicate(targetType, theChain));
+
+mySqlBuilder.addSortDate(datePredicateBuilder.getColumnValueLow(), theAscending, myUseAggregate);
+return;

 /*
 * Note that many of the options below aren't implemented because they
@ -417,14 +430,6 @@ public class QueryStack {
 + theParamName + "." + theChain + " as this parameter. Can not sort on chains of target type: "
 + targetSearchParameter.getParamType().name());
 }
-
-addSortCustomJoin(resourceLinkPredicateBuilder.getColumnTargetResourceId(), chainedPredicateBuilder, null);
-Condition predicate = chainedPredicateBuilder.createHashIdentityPredicate(targetType, theChain);
-mySqlBuilder.addPredicate(predicate);
-
-for (DbColumn next : sortColumn) {
-mySqlBuilder.addSortNumeric(next, theAscending, myUseAggregate);
-}
 }

 public void addSortOnString(String theResourceName, String theParamName, boolean theAscending) {
@ -73,7 +73,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
@ -685,26 +684,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 }
 }

-private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
-ourLog.trace("Checking {} parents", theParents.size());
-int retVal = 0;
-
-for (TermConceptParentChildLink nextLink : theParents) {
-if (nextLink.getRelationshipType() == TermConceptParentChildLink.RelationshipTypeEnum.ISA) {
-TermConcept nextParent = nextLink.getParent();
-retVal += ensureParentsSaved(nextParent.getParents());
-if (nextParent.getId() == null) {
-nextParent.setUpdated(new Date());
-myConceptDao.saveAndFlush(nextParent);
-retVal++;
-ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId());
-}
-}
-}
-
-return retVal;
-}
-
 @Nonnull
 private TermCodeSystem getOrCreateDistinctTermCodeSystem(
 IResourcePersistentId theCodeSystemResourcePid,
@ -46,6 +46,8 @@ public class TermConceptDaoSvc {
 @Autowired
 protected ITermConceptDesignationDao myConceptDesignationDao;

+private boolean mySupportLegacyLob = false;
+
 public int saveConcept(TermConcept theConcept) {
 int retVal = 0;

@ -70,9 +72,11 @@ public class TermConceptDaoSvc {
 retVal++;
 theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
 theConcept.setUpdated(new Date());
+theConcept.flagForLegacyLobSupport(mySupportLegacyLob);
 myConceptDao.save(theConcept);

 for (TermConceptProperty next : theConcept.getProperties()) {
+next.performLegacyLobSupport(mySupportLegacyLob);
 myConceptPropertyDao.save(next);
 }

@ -85,6 +89,11 @@ public class TermConceptDaoSvc {
 return retVal;
 }

+public TermConceptDaoSvc setSupportLegacyLob(boolean theSupportLegacyLob) {
+mySupportLegacyLob = theSupportLegacyLob;
+return this;
+}
+
 private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
 ourLog.trace("Checking {} parents", theParents.size());
 int retVal = 0;
@ -95,6 +104,7 @@ public class TermConceptDaoSvc {
 retVal += ensureParentsSaved(nextParent.getParents());
 if (nextParent.getId() == null) {
 nextParent.setUpdated(new Date());
+nextParent.flagForLegacyLobSupport(mySupportLegacyLob);
 myConceptDao.saveAndFlush(nextParent);
 retVal++;
 ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId());
@ -293,6 +293,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 @Autowired
 private InMemoryTerminologyServerValidationSupport myInMemoryTerminologyServerValidationSupport;

+@Autowired
+private ValueSetConceptAccumulatorFactory myValueSetConceptAccumulatorFactory;
+
 @Override
 public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) {
 TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theSystem);
@ -2393,11 +2396,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 });
 assert valueSet != null;

-ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator(
-valueSetToExpand, myTermValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao);
+ValueSetConceptAccumulator valueSetConceptAccumulator =
+myValueSetConceptAccumulatorFactory.create(valueSetToExpand);
 ValueSetExpansionOptions options = new ValueSetExpansionOptions();
 options.setIncludeHierarchy(true);
-expandValueSet(options, valueSet, accumulator);
+expandValueSet(options, valueSet, valueSetConceptAccumulator);

 // We are done with this ValueSet.
 txTemplate.executeWithoutResult(t -> {
@ -2412,7 +2415,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 "Pre-expanded ValueSet[{}] with URL[{}] - Saved {} concepts in {}",
 valueSet.getId(),
 valueSet.getUrl(),
-accumulator.getConceptsSaved(),
+valueSetConceptAccumulator.getConceptsSaved(),
 sw);

 } catch (Exception e) {
@ -48,6 +48,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 private int myDesignationsSaved;
 private int myConceptsExcluded;

+private boolean mySupportLegacyLob = false;
+
 public ValueSetConceptAccumulator(
 @Nonnull TermValueSet theTermValueSet,
 @Nonnull ITermValueSetDao theValueSetDao,
@ -184,6 +186,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 concept.setSourceConceptPid(theSourceConceptPid);
 concept.setSourceConceptDirectParentPids(theSourceConceptDirectParentPids);

+if (!mySupportLegacyLob) {
+concept.clearSourceConceptDirectParentPidsLob();
+}
+
 myValueSetConceptDao.save(concept);
 myValueSetDao.save(myTermValueSet.incrementTotalConcepts());

@ -253,4 +259,9 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator {
 // TODO: DM 2019-07-16 - If so, we should also populate TermValueSetConceptProperty entities here.
 // TODO: DM 2019-07-30 - Expansions don't include the properties themselves; they may be needed to facilitate
 // filters and parameterized expansions.
+
+public ValueSetConceptAccumulator setSupportLegacyLob(boolean theSupportLegacyLob) {
+mySupportLegacyLob = theSupportLegacyLob;
+return this;
+}
 }
@ -0,0 +1,51 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
+import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
+import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
+import ca.uhn.fhir.jpa.entity.TermValueSet;
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class ValueSetConceptAccumulatorFactory {
+
+@Autowired
+private ITermValueSetDao myValueSetDao;
+
+@Autowired
+private ITermValueSetConceptDao myValueSetConceptDao;
+
+@Autowired
+private ITermValueSetConceptDesignationDao myValueSetConceptDesignationDao;
+
+@Autowired
+private JpaStorageSettings myStorageSettings;
+
+public ValueSetConceptAccumulator create(TermValueSet theTermValueSet) {
+ValueSetConceptAccumulator valueSetConceptAccumulator = new ValueSetConceptAccumulator(
+theTermValueSet, myValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao);
+
+valueSetConceptAccumulator.setSupportLegacyLob(myStorageSettings.isWriteToLegacyLobColumns());
+
+return valueSetConceptAccumulator;
+}
+}
@ -19,6 +19,7 @@
 */
 package ca.uhn.fhir.jpa.term.config;

+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.term.TermConceptDaoSvc;
 import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
@ -41,7 +42,7 @@ public class TermCodeSystemConfig {
 }

 @Bean
-public TermConceptDaoSvc termConceptDaoSvc() {
-return new TermConceptDaoSvc();
+public TermConceptDaoSvc termConceptDaoSvc(JpaStorageSettings theJpaStorageSettings) {
+return new TermConceptDaoSvc().setSupportLegacyLob(theJpaStorageSettings.isWriteToLegacyLobColumns());
 }
 }
@ -4,6 +4,7 @@ import ca.uhn.fhir.batch2.api.JobOperationResultJson;
 import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest;
 import ca.uhn.fhir.batch2.model.JobInstance;
 import ca.uhn.fhir.batch2.model.StatusEnum;
+import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
 import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
@ -31,6 +32,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
 import static org.mockito.Mockito.when;

 @ExtendWith(MockitoExtension.class)
@ -30,6 +30,8 @@ import org.springframework.transaction.support.TransactionCallback;
 import org.springframework.transaction.support.TransactionTemplate;

 import jakarta.annotation.Nonnull;
+
+import java.time.Instant;
 import java.time.LocalDateTime;
 import java.time.ZoneId;
 import java.time.temporal.ChronoUnit;
@ -43,6 +45,7 @@ import java.util.stream.IntStream;
 import static org.exparity.hamcrest.date.DateMatchers.within;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.ArgumentMatchers.eq;
@ -97,7 +100,17 @@ public class BulkDataExportJobSchedulingHelperImplTest {
 verify(myJpaJobPersistence, never()).deleteInstanceAndChunks(anyString());

 final Date cutoffDate = myCutoffCaptor.getValue();
-assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.SECOND), DateUtils.truncate(cutoffDate, Calendar.SECOND));
+Date expectedCutoff = computeDateFromConfig(expectedRetentionHours);
+verifyDatesWithinSeconds(expectedCutoff, cutoffDate, 2);
+}
+
+private void verifyDatesWithinSeconds(Date theExpected, Date theActual, int theSeconds) {
+Instant expectedInstant = theExpected.toInstant();
+Instant actualInstant = theActual.toInstant();
+
+String msg = String.format("Expected time not within %d s", theSeconds);
+assertTrue(expectedInstant.plus(theSeconds, ChronoUnit.SECONDS).isAfter(actualInstant), msg);
+assertTrue(expectedInstant.minus(theSeconds, ChronoUnit.SECONDS).isBefore(actualInstant), msg);
 }

 @Test
|
@ -0,0 +1,59 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.expunge;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.model.primitive.IdDt;
|
||||||
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ch.qos.logback.classic.Logger;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.junit.jupiter.api.extension.ExtendWith;
|
||||||
|
import org.mockito.InjectMocks;
|
||||||
|
import org.mockito.Mock;
|
||||||
|
import org.mockito.Spy;
|
||||||
|
import org.mockito.junit.jupiter.MockitoExtension;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
|
import static org.mockito.Mockito.never;
|
||||||
|
import static org.mockito.Mockito.spy;
|
||||||
|
import static org.mockito.Mockito.verify;
|
||||||
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
@ExtendWith(MockitoExtension.class)
|
||||||
|
public class JpaResourceExpungeServiceTest {
|
||||||
|
private static final Logger ourLog = (Logger) LoggerFactory.getLogger(JpaResourceExpungeService.class);
|
||||||
|
|
||||||
|
@InjectMocks
|
||||||
|
@Spy
|
||||||
|
private JpaResourceExpungeService myService = spy(new JpaResourceExpungeService());
|
||||||
|
|
||||||
|
@Mock
|
||||||
|
private IResourceTableDao myResourceTableDao;
|
||||||
|
|
||||||
|
@Mock
|
||||||
|
private IResourceHistoryTableDao myResourceHistoryTableDao;
|
||||||
|
|
||||||
|
@Mock
|
||||||
|
private RequestDetails myRequestDetails;
|
||||||
|
|
||||||
|
@Mock
|
||||||
|
private ResourceTable resourceTable;
|
||||||
|
|
||||||
|
@Mock
|
||||||
|
private JpaStorageSettings myStorageSettings;
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testExpungeDoesNotDeleteAllSearchParams() {
|
||||||
|
when(myResourceTableDao.findById(any())).thenReturn(Optional.of(resourceTable));
|
||||||
|
when(resourceTable.getIdDt()).thenReturn(new IdDt());
|
||||||
|
|
||||||
|
myService.expungeCurrentVersionOfResource(myRequestDetails, 1L, new AtomicInteger(1));
|
||||||
|
|
||||||
|
verify(myService, never()).deleteAllSearchParams(any());
|
||||||
|
}
|
||||||
|
}
|
|
@@ -2,8 +2,11 @@ package ca.uhn.fhir.jpa.entity;
 
 import com.google.common.base.Strings;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.startsWith;
@@ -21,8 +24,8 @@ public class TermConceptPropertyTest {
 		termConceptProperty.setValue(ourVeryLongString);
 
 		// then
-		assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue());
-		assertThat(termConceptProperty.getValueBinForTesting(), notNullValue());
+		assertThat(termConceptProperty.hasValueBlobForTesting(), equalTo(true));
+		assertThat(termConceptProperty.hasValueBinForTesting(), equalTo(true));
 	}
 
 	@Test
@@ -78,4 +81,19 @@ public class TermConceptPropertyTest {
 		assertThat(value, startsWith("a"));
 	}
 
+	@ParameterizedTest
+	@ValueSource(booleans = {false, true})
+	public void testSetValue_withSupportLegacyLob(boolean theSupportLegacyLob){
+		// given
+		TermConceptProperty termConceptProperty = new TermConceptProperty();
+
+		// when
+		termConceptProperty.setValue(ourVeryLongString);
+		termConceptProperty.performLegacyLobSupport(theSupportLegacyLob);
+
+		// then
+		assertThat(termConceptProperty.hasValueBinForTesting(), equalTo(true));
+		assertThat(termConceptProperty.hasValueBlobForTesting(), equalTo(theSupportLegacyLob));
+	}
+
 }
@@ -0,0 +1,55 @@
+package ca.uhn.fhir.util;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+/**
+ * We can't test this class from hapi-fhir-base due to the dependency restrictions (see the IMPORTANT NOT in the pom.xml
+ * That's why we're testing the class from this module.
+ */
+class SearchParameterUtilTest {
+	private static final String COVERAGE_BENEFICIARY = "Coverage.beneficiary";
+	private static final String PATIENT_LINK_OTHER = "Patient.link.other";
+	private static final String RESEARCH_SUBJECT_INDIVIDUAL = "ResearchSubject.individual";
+
+	private static final String SUPER_LONG_FHIR_PATH = "Account.subject.where(resolve() is Patient) | AdverseEvent.subject.where(resolve() is Patient) | AllergyIntolerance.patient | Appointment.participant.actor.where(resolve() is Patient) | Appointment.subject.where(resolve() is Patient) | AppointmentResponse.actor.where(resolve() is Patient) | AuditEvent.patient | Basic.subject.where(resolve() is Patient) | BodyStructure.patient | CarePlan.subject.where(resolve() is Patient) | CareTeam.subject.where(resolve() is Patient) | ChargeItem.subject.where(resolve() is Patient) | Claim.patient | ClaimResponse.patient | ClinicalImpression.subject.where(resolve() is Patient) | Communication.subject.where(resolve() is Patient) | CommunicationRequest.subject.where(resolve() is Patient) | Composition.subject.where(resolve() is Patient) | Condition.subject.where(resolve() is Patient) | Consent.subject.where(resolve() is Patient) | Contract.subject.where(resolve() is Patient) | Coverage.beneficiary | CoverageEligibilityRequest.patient | CoverageEligibilityResponse.patient | DetectedIssue.subject.where(resolve() is Patient) | DeviceRequest.subject.where(resolve() is Patient) | DeviceUsage.patient | DiagnosticReport.subject.where(resolve() is Patient) | DocumentReference.subject.where(resolve() is Patient) | Encounter.subject.where(resolve() is Patient) | EnrollmentRequest.candidate | EpisodeOfCare.patient | ExplanationOfBenefit.patient | FamilyMemberHistory.patient | Flag.subject.where(resolve() is Patient) | Goal.subject.where(resolve() is Patient) | GuidanceResponse.subject.where(resolve() is Patient) | ImagingSelection.subject.where(resolve() is Patient) | ImagingStudy.subject.where(resolve() is Patient) | Immunization.patient | ImmunizationEvaluation.patient | ImmunizationRecommendation.patient | Invoice.subject.where(resolve() is Patient) | List.subject.where(resolve() is Patient) | MeasureReport.subject.where(resolve() is Patient) | MedicationAdministration.subject.where(resolve() is Patient) | MedicationDispense.subject.where(resolve() is Patient) | MedicationRequest.subject.where(resolve() is Patient) | MedicationStatement.subject.where(resolve() is Patient) | MolecularSequence.subject.where(resolve() is Patient) | NutritionIntake.subject.where(resolve() is Patient) | NutritionOrder.subject.where(resolve() is Patient) | Observation.subject.where(resolve() is Patient) | Person.link.target.where(resolve() is Patient) | Procedure.subject.where(resolve() is Patient) | Provenance.patient | QuestionnaireResponse.subject.where(resolve() is Patient) | RelatedPerson.patient | RequestOrchestration.subject.where(resolve() is Patient) | ResearchSubject.subject.where(resolve() is Patient) | RiskAssessment.subject.where(resolve() is Patient) | ServiceRequest.subject.where(resolve() is Patient) | Specimen.subject.where(resolve() is Patient) | SupplyDelivery.patient | SupplyRequest.deliverFor | Task.for.where(resolve() is Patient) | VisionPrescription.patient";
+
+	public static Stream<Arguments> fhirVersionAndResourceType() {
+		return Stream.of(
+			Arguments.of(FhirVersionEnum.DSTU3, "Coverage", COVERAGE_BENEFICIARY),
+			Arguments.of(FhirVersionEnum.R4, "Coverage", COVERAGE_BENEFICIARY),
+			Arguments.of(FhirVersionEnum.R4B, "Coverage", COVERAGE_BENEFICIARY),
+			Arguments.of(FhirVersionEnum.R5, "Coverage", SUPER_LONG_FHIR_PATH),
+			Arguments.of(FhirVersionEnum.DSTU3, "Patient", PATIENT_LINK_OTHER),
+			Arguments.of(FhirVersionEnum.R4, "Patient", PATIENT_LINK_OTHER),
+			Arguments.of(FhirVersionEnum.R4B, "Patient", PATIENT_LINK_OTHER),
+			Arguments.of(FhirVersionEnum.R5, "Patient", PATIENT_LINK_OTHER),
+			Arguments.of(FhirVersionEnum.DSTU3, "ResearchSubject", RESEARCH_SUBJECT_INDIVIDUAL),
+			Arguments.of(FhirVersionEnum.R4, "ResearchSubject", RESEARCH_SUBJECT_INDIVIDUAL),
+			Arguments.of(FhirVersionEnum.R4B, "ResearchSubject", RESEARCH_SUBJECT_INDIVIDUAL),
+			Arguments.of(FhirVersionEnum.R5, "ResearchSubject", SUPER_LONG_FHIR_PATH)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("fhirVersionAndResourceType")
+	void getOnlyPatientSearchParamForResourceType(FhirVersionEnum theFhirVersion, String theResourceType, String theExpectedPath) {
+		final Optional<RuntimeSearchParam> optRuntimeSearchParam = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(FhirContext.forCached(theFhirVersion), theResourceType);
+
+		assertTrue(optRuntimeSearchParam.isPresent());
+		final RuntimeSearchParam runtimeSearchParam = optRuntimeSearchParam.get();
+		assertEquals(theExpectedPath, runtimeSearchParam.getPath());
+	}
+}
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.0-SNAPSHOT</version>
+		<version>7.3.2-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 
@@ -3,7 +3,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.0-SNAPSHOT</version>
+		<version>7.3.2-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 
@@ -3,7 +3,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.0-SNAPSHOT</version>
+		<version>7.3.2-SNAPSHOT</version>
 
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -42,7 +42,10 @@ public interface IIpsGenerationStrategy {
 
 	/**
 	 * This method returns the profile associated with the IPS document
-	 * generated by this strategy.
+	 * generated by this strategy. This URL will be added to generated
+	 * IPS Bundles in <code>Bundle.meta.profile</code>, and can also
+	 * be used to support the <code>profile</code> parameter on the
+	 * <code>$summary</code> operation.
 	 */
 	String getBundleProfile();
 
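As an aside, a custom strategy could satisfy the expanded getBundleProfile() contract above roughly as follows. This is an illustrative sketch only: the package and class name are hypothetical, DefaultJpaIpsGenerationStrategy is the base class referenced elsewhere in this diff, and the URL is the IPS profile asserted later in IpsGenerationR4Test.

// Illustrative sketch, not part of this change; package and class name are hypothetical.
package ca.uhn.fhir.jpa.ips.custom;

import ca.uhn.fhir.jpa.ips.jpa.DefaultJpaIpsGenerationStrategy;

public class IpsProfileGenerationStrategy extends DefaultJpaIpsGenerationStrategy {

	@Override
	public String getBundleProfile() {
		// Ends up in Bundle.meta.profile of each generated IPS document and can back
		// the "profile" parameter on $summary, as the Javadoc above describes.
		return "http://hl7.org/fhir/uv/ips/StructureDefinition/Bundle-uv-ips";
	}
}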
|
@ -146,6 +146,7 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc {
|
||||||
bundleBuilder.setType(Bundle.BundleType.DOCUMENT.toCode());
|
bundleBuilder.setType(Bundle.BundleType.DOCUMENT.toCode());
|
||||||
bundleBuilder.setIdentifier("urn:ietf:rfc:4122", UUID.randomUUID().toString());
|
bundleBuilder.setIdentifier("urn:ietf:rfc:4122", UUID.randomUUID().toString());
|
||||||
bundleBuilder.setTimestamp(InstantType.now());
|
bundleBuilder.setTimestamp(InstantType.now());
|
||||||
|
bundleBuilder.addProfile(theStrategy.getBundleProfile());
|
||||||
|
|
||||||
// Add composition to document
|
// Add composition to document
|
||||||
bundleBuilder.addDocumentEntry(composition);
|
bundleBuilder.addDocumentEntry(composition);
|
||||||
|
|
|
@@ -5,7 +5,6 @@ import ca.uhn.fhir.context.support.ConceptValidationOptions;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.context.support.ValidationSupportContext;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.ips.api.IIpsGenerationStrategy;
 import ca.uhn.fhir.jpa.ips.jpa.DefaultJpaIpsGenerationStrategy;
 import ca.uhn.fhir.jpa.ips.provider.IpsOperationProvider;
@@ -18,6 +17,7 @@ import ca.uhn.fhir.validation.SingleValidationMessage;
 import ca.uhn.fhir.validation.ValidationResult;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
+import org.hl7.fhir.common.hapi.validation.support.NpmPackageValidationSupport;
 import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
 import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -31,6 +31,7 @@ import org.hl7.fhir.r4.model.Immunization;
 import org.hl7.fhir.r4.model.MedicationStatement;
 import org.hl7.fhir.r4.model.Parameters;
 import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.PrimitiveType;
 import org.hl7.fhir.r4.model.Reference;
 import org.hl7.fhir.r4.model.Resource;
 import org.junit.jupiter.api.AfterEach;
@@ -41,17 +42,19 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.test.context.ContextConfiguration;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
+import static ca.uhn.fhir.util.BundleUtil.convertBundleIntoTransaction;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.stringContainsInOrder;
+import static org.hibernate.validator.internal.util.Contracts.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * This test uses a complete R4 JPA server as a backend and wires the
@@ -77,13 +80,9 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 
 
 	@Test
-	public void testGenerateLargePatientSummary() {
+	public void testGenerateLargePatientSummary() throws IOException {
 		Bundle sourceData = ClasspathUtil.loadCompressedResource(myFhirContext, Bundle.class, "/large-patient-everything.json.gz");
-		sourceData.setType(Bundle.BundleType.TRANSACTION);
-		for (Bundle.BundleEntryComponent nextEntry : sourceData.getEntry()) {
-			nextEntry.getRequest().setMethod(Bundle.HTTPVerb.PUT);
-			nextEntry.getRequest().setUrl(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
-		}
+		sourceData = convertBundleIntoTransaction(myFhirContext, sourceData, null);
 		Bundle outcome = mySystemDao.transaction(mySrd, sourceData);
 		ourLog.info("Created {} resources", outcome.getEntry().size());
 
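The hunk above (and the matching ones further down) swaps the inline PUT-building loop for a call to BundleUtil.convertBundleIntoTransaction. Going only by the loop it replaces and by the "EPD" prefix passed in testGenerateLargePatientSummary4 below, the helper presumably behaves along these lines; this sketch is an assumption for illustration, not the actual BundleUtil implementation.

// Rough sketch of the presumed behaviour of convertBundleIntoTransaction, inferred
// from the removed loop. The real method lives in ca.uhn.fhir.util.BundleUtil; the
// prefix handling shown here is an assumption based on the "EPD" usage in the test,
// and theContext is unused in this simplified version.
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.Bundle;

public final class BundleTransactionSketch {

	public static Bundle convertBundleIntoTransaction(FhirContext theContext, Bundle theBundle, String theResourceIdPrefix) {
		Bundle retVal = new Bundle();
		retVal.setType(Bundle.BundleType.TRANSACTION);
		for (Bundle.BundleEntryComponent nextEntry : theBundle.getEntry()) {
			String id = nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue();
			if (theResourceIdPrefix != null) {
				// e.g. "Patient/2223" would become "Patient/EPD2223" (assumed)
				id = id.replace("/", "/" + theResourceIdPrefix);
			}
			Bundle.BundleEntryComponent newEntry = retVal.addEntry();
			newEntry.setResource(nextEntry.getResource());
			newEntry.getRequest().setMethod(Bundle.HTTPVerb.PUT);
			newEntry.getRequest().setUrl(id);
		}
		return retVal;
	}
}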
@@ -97,6 +96,9 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 		ourLog.info("Output: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
 
 		// Verify
+		assertThat(output.getMeta().getProfile().stream().map(PrimitiveType::getValue).toList(), contains(
+			"http://hl7.org/fhir/uv/ips/StructureDefinition/Bundle-uv-ips"
+		));
 		validateDocument(output);
 		assertEquals(117, output.getEntry().size());
 		String patientId = findFirstEntryResource(output, Patient.class, 1).getIdElement().toUnqualifiedVersionless().getValue();
@@ -114,11 +116,7 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 		myStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.ANY);
 
 		Bundle sourceData = ClasspathUtil.loadCompressedResource(myFhirContext, Bundle.class, "/large-patient-everything-2.json.gz");
-		sourceData.setType(Bundle.BundleType.TRANSACTION);
-		for (Bundle.BundleEntryComponent nextEntry : sourceData.getEntry()) {
-			nextEntry.getRequest().setMethod(Bundle.HTTPVerb.PUT);
-			nextEntry.getRequest().setUrl(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
-		}
+		sourceData = convertBundleIntoTransaction(myFhirContext, sourceData, null);
 		Bundle outcome = mySystemDao.transaction(mySrd, sourceData);
 		ourLog.info("Created {} resources", outcome.getEntry().size());
 
@@ -140,11 +138,7 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 		myStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.ANY);
 
 		Bundle sourceData = ClasspathUtil.loadCompressedResource(myFhirContext, Bundle.class, "/large-patient-everything-3.json.gz");
-		sourceData.setType(Bundle.BundleType.TRANSACTION);
-		for (Bundle.BundleEntryComponent nextEntry : sourceData.getEntry()) {
-			nextEntry.getRequest().setMethod(Bundle.HTTPVerb.PUT);
-			nextEntry.getRequest().setUrl(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
-		}
+		sourceData = convertBundleIntoTransaction(myFhirContext, sourceData, null);
 		Bundle outcome = mySystemDao.transaction(mySrd, sourceData);
 		ourLog.info("Created {} resources", outcome.getEntry().size());
 
@@ -162,15 +156,32 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 	}
 
 	@Test
-	public void testGenerateTinyPatientSummary() {
+	public void testGenerateLargePatientSummary4() {
+		Bundle sourceData = ClasspathUtil.loadCompressedResource(myFhirContext, Bundle.class, "/large-patient-everything-4.json.gz");
+		sourceData = convertBundleIntoTransaction(myFhirContext, sourceData, "EPD");
+
+		Bundle outcome = mySystemDao.transaction(mySrd, sourceData);
+		ourLog.info("Created {} resources", outcome.getEntry().size());
+
+		Bundle output = myClient
+			.operation()
+			.onInstance("Patient/EPD2223")
+			.named(JpaConstants.OPERATION_SUMMARY)
+			.withNoParameters(Parameters.class)
+			.returnResourceType(Bundle.class)
+			.execute();
+		ourLog.info("Output: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+
+		// Verify
+		assertEquals(55, output.getEntry().size());
+	}
+
+	@Test
+	public void testGenerateTinyPatientSummary() throws IOException {
 		myStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.ANY);
 
 		Bundle sourceData = ClasspathUtil.loadCompressedResource(myFhirContext, Bundle.class, "/tiny-patient-everything.json.gz");
-		sourceData.setType(Bundle.BundleType.TRANSACTION);
-		for (Bundle.BundleEntryComponent nextEntry : sourceData.getEntry()) {
-			nextEntry.getRequest().setMethod(Bundle.HTTPVerb.PUT);
-			nextEntry.getRequest().setUrl(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
-		}
+		sourceData = convertBundleIntoTransaction(myFhirContext, sourceData, null);
 		Bundle outcome = mySystemDao.transaction(mySrd, sourceData);
 		ourLog.info("Created {} resources", outcome.getEntry().size());
 
@@ -251,7 +262,7 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 
 
 	@Nonnull
-	private static Composition findCompositionSectionByDisplay(Bundle output, String theDisplay) {
+	private static Composition findCompositionSectionByDisplay(Bundle output, @SuppressWarnings("SameParameterValue") String theDisplay) {
 		Composition composition = (Composition) output.getEntry().get(0).getResource();
 		Composition.SectionComponent section = composition
 			.getSection()
@@ -259,6 +270,7 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 			.filter(t -> t.getCode().getCoding().get(0).getDisplay().equals(theDisplay))
 			.findFirst()
 			.orElseThrow();
+		assertNotNull(section);
 		return composition;
 	}
 
@@ -266,23 +278,26 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 	@Nonnull
 	private static List<String> extractSectionTitles(Bundle outcome) {
 		Composition composition = (Composition) outcome.getEntry().get(0).getResource();
-		List<String> sectionTitles = composition
+		return composition
 			.getSection()
 			.stream()
 			.map(Composition.SectionComponent::getTitle)
 			.toList();
-		return sectionTitles;
 	}
 
-	private void validateDocument(Bundle theOutcome) {
+	private void validateDocument(Bundle theOutcome) throws IOException {
 		FhirValidator validator = myFhirContext.newValidator();
 		FhirInstanceValidator instanceValidator = new FhirInstanceValidator(myFhirContext);
-		instanceValidator.setValidationSupport(new ValidationSupportChain(new IpsTerminologySvc(), myFhirContext.getValidationSupport()));
+		NpmPackageValidationSupport npmSupport = new NpmPackageValidationSupport(myFhirContext);
+		npmSupport.loadPackageFromClasspath("/ips-package-1.1.0.tgz");
+
+		instanceValidator.setValidationSupport(new ValidationSupportChain(npmSupport, new IpsTerminologySvc(), myFhirContext.getValidationSupport()));
 		validator.registerValidatorModule(instanceValidator);
 		ValidationResult validation = validator.validateWithResult(theOutcome);
 
 		Optional<SingleValidationMessage> failure = validation.getMessages().stream().filter(t -> t.getSeverity().ordinal() >= ResultSeverityEnum.ERROR.ordinal()).findFirst();
-		assertFalse(failure.isPresent(), () -> failure.get().toString());
+		assertFalse(failure.isPresent(), () -> failure.orElseThrow().toString());
 	}
 
 	@Configuration
@@ -294,7 +309,7 @@ public class IpsGenerationR4Test extends BaseResourceProviderR4Test {
 	}
 
 	@Bean
-	public IIpsGeneratorSvc ipsGeneratorSvc(FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy, DaoRegistry theDaoRegistry) {
+	public IIpsGeneratorSvc ipsGeneratorSvc(FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy) {
 		return new IpsGeneratorSvcImpl(theFhirContext, theGenerationStrategy);
 	}
 
Binary file not shown.
Binary file not shown.
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.0-SNAPSHOT</version>
+		<version>7.3.2-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 
@@ -166,6 +166,7 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc {
 			HookParams params = new HookParams();
 			params.add(MdmMergeEvent.class, event);
 			params.add(RequestDetails.class, theParams.getRequestDetails());
+			params.add(MdmTransactionContext.class, theParams.getMdmTransactionContext());
 			myInterceptorBroadcaster.callHooks(Pointcut.MDM_POST_MERGE_GOLDEN_RESOURCES, params);
 		}
 	}
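With the added line above, the MDM_POST_MERGE_GOLDEN_RESOURCES broadcast now carries the MdmTransactionContext alongside the merge event and request details. A hook consumer could pick it up roughly as sketched below; the interceptor class is hypothetical and only mirrors the HookParams built above.

// Hypothetical consumer of the broadcast above; not part of this change.
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent; // package assumed; not shown in this diff
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class MdmMergeListener {

	@Hook(Pointcut.MDM_POST_MERGE_GOLDEN_RESOURCES)
	public void goldenResourcesMerged(MdmMergeEvent theEvent, RequestDetails theRequestDetails, MdmTransactionContext theMdmTransactionContext) {
		// The transaction context added by this change is now available here,
		// e.g. for auditing which MDM operation type triggered the merge.
	}
}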
@@ -2,11 +2,16 @@ package ca.uhn.fhir.jpa.mdm.svc;
 
 import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
 import ca.uhn.fhir.mdm.api.IMdmSurvivorshipService;
+import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
+import ca.uhn.fhir.mdm.api.MdmMatchOutcome;
 import ca.uhn.fhir.mdm.model.MdmTransactionContext;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import org.hl7.fhir.r4.model.Patient;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import java.util.List;
+
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNull;
@@ -55,4 +60,21 @@ class MdmSurvivorshipSvcImplIT extends BaseMdmR4Test {
 		assertEquals(p1.getTelecom().size(), p1.getTelecom().size());
 		assertTrue(p2.getTelecomFirstRep().equalsDeep(p1.getTelecomFirstRep()));
 	}
+
+	@Test
+	public void matchingPatientsWith_NON_Numeric_Ids_matches_doesNotThrow_NumberFormatException() {
+		final Patient frankPatient1 = buildFrankPatient();
+		frankPatient1.setId("patA");
+		myPatientDao.update(frankPatient1, new SystemRequestDetails());
+		final Patient frankPatient2 = buildFrankPatient();
+		frankPatient2.setId("patB");
+		myPatientDao.update(frankPatient2, new SystemRequestDetails());
+		final Patient goldenPatient = buildFrankPatient();
+		myPatientDao.create(goldenPatient, new SystemRequestDetails());
+
+		myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient1, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient"));
+		myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient2, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient"));
+
+		myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(goldenPatient, new MdmTransactionContext(MdmTransactionContext.OperationType.UPDATE_LINK));
+	}
 }
Some files were not shown because too many files have changed in this diff.