Merge remote-tracking branch 'origin/master' into do-20231213-core-bump-6-2-6
This commit is contained in:
commit
2009e78920
|
@ -8,6 +8,11 @@ tab_width = 4
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
charset = utf-8
|
charset = utf-8
|
||||||
|
|
||||||
|
[*.html]
|
||||||
|
indent_style = tab
|
||||||
|
tab_width = 3
|
||||||
|
indent_size = 3
|
||||||
|
|
||||||
[*.xml]
|
[*.xml]
|
||||||
indent_style = tab
|
indent_style = tab
|
||||||
tab_width = 3
|
tab_width = 3
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir</artifactId>
|
<artifactId>hapi-fhir</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../pom.xml</relativePath>
|
<relativePath>../pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -29,4 +29,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
getterVisibility = JsonAutoDetect.Visibility.NONE,
|
getterVisibility = JsonAutoDetect.Visibility.NONE,
|
||||||
isGetterVisibility = JsonAutoDetect.Visibility.NONE,
|
isGetterVisibility = JsonAutoDetect.Visibility.NONE,
|
||||||
setterVisibility = JsonAutoDetect.Visibility.NONE)
|
setterVisibility = JsonAutoDetect.Visibility.NONE)
|
||||||
public interface IModelJson {}
|
public interface IModelJson {
|
||||||
|
String SENSITIVE_DATA_FILTER_NAME = "sensitiveDataFilter";
|
||||||
|
}
|
||||||
|
|
|
@ -0,0 +1,34 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR - Core Library
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.model.api.annotation;
|
||||||
|
|
||||||
|
import java.lang.annotation.ElementType;
|
||||||
|
import java.lang.annotation.Retention;
|
||||||
|
import java.lang.annotation.RetentionPolicy;
|
||||||
|
import java.lang.annotation.Target;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Annotation to mark a field as sensitive, indicating that it should not
|
||||||
|
* be displayed or serialized by jackson. The only way to serialize an object annotated with this annotation is to use
|
||||||
|
* {@link ca.uhn.fhir.util.JsonUtil}, as it has a registered filter against this annotation.
|
||||||
|
*/
|
||||||
|
@Retention(RetentionPolicy.RUNTIME)
|
||||||
|
@Target(ElementType.FIELD)
|
||||||
|
public @interface SensitiveNoDisplay {}
|
|
@ -91,7 +91,7 @@ public enum BundleTypeEnum {
|
||||||
/**
|
/**
|
||||||
* Returns the enumerated value associated with this code
|
* Returns the enumerated value associated with this code
|
||||||
*/
|
*/
|
||||||
public BundleTypeEnum forCode(String theCode) {
|
public static BundleTypeEnum forCode(String theCode) {
|
||||||
BundleTypeEnum retVal = CODE_TO_ENUM.get(theCode);
|
BundleTypeEnum retVal = CODE_TO_ENUM.get(theCode);
|
||||||
return retVal;
|
return retVal;
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,6 +25,7 @@ import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext;
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.narrative2.BaseNarrativeGenerator;
|
import ca.uhn.fhir.narrative2.BaseNarrativeGenerator;
|
||||||
import ca.uhn.fhir.narrative2.INarrativeTemplate;
|
import ca.uhn.fhir.narrative2.INarrativeTemplate;
|
||||||
|
import ca.uhn.fhir.narrative2.NarrativeGeneratorTemplateUtils;
|
||||||
import ca.uhn.fhir.narrative2.TemplateTypeEnum;
|
import ca.uhn.fhir.narrative2.TemplateTypeEnum;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||||
import com.google.common.collect.Sets;
|
import com.google.common.collect.Sets;
|
||||||
|
@ -109,6 +110,7 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener
|
||||||
Context context = new Context();
|
Context context = new Context();
|
||||||
context.setVariable("resource", theTargetContext);
|
context.setVariable("resource", theTargetContext);
|
||||||
context.setVariable("context", theTargetContext);
|
context.setVariable("context", theTargetContext);
|
||||||
|
context.setVariable("narrativeUtil", NarrativeGeneratorTemplateUtils.INSTANCE);
|
||||||
context.setVariable(
|
context.setVariable(
|
||||||
"fhirVersion", theFhirContext.getVersion().getVersion().name());
|
"fhirVersion", theFhirContext.getVersion().getVersion().name());
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,53 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR - Core Library
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.narrative2;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.util.BundleUtil;
|
||||||
|
import org.apache.commons.lang3.tuple.Pair;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Objects;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An instance of this class is added to the Thymeleaf context as a variable with
|
||||||
|
* name <code>"narrativeUtil"</code> and can be accessed from narrative templates.
|
||||||
|
*
|
||||||
|
* @since 7.0.0
|
||||||
|
*/
|
||||||
|
public class NarrativeGeneratorTemplateUtils {
|
||||||
|
|
||||||
|
public static final NarrativeGeneratorTemplateUtils INSTANCE = new NarrativeGeneratorTemplateUtils();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given a Bundle as input, are any entries present with a given resource type
|
||||||
|
*/
|
||||||
|
public boolean bundleHasEntriesWithResourceType(IBaseBundle theBaseBundle, String theResourceType) {
|
||||||
|
FhirContext ctx = theBaseBundle.getStructureFhirVersionEnum().newContextCached();
|
||||||
|
List<Pair<String, IBaseResource>> entryResources =
|
||||||
|
BundleUtil.getBundleEntryUrlsAndResources(ctx, theBaseBundle);
|
||||||
|
return entryResources.stream()
|
||||||
|
.map(Pair::getValue)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.anyMatch(t -> ctx.getResourceType(t).equals(theResourceType));
|
||||||
|
}
|
||||||
|
}
|
|
@ -1413,14 +1413,18 @@ class ParserState<T> {
|
||||||
myErrorHandler.invalidValue(location, value, "Attribute value must not be empty (\"\")");
|
myErrorHandler.invalidValue(location, value, "Attribute value must not be empty (\"\")");
|
||||||
} else {
|
} else {
|
||||||
|
|
||||||
/*
|
|
||||||
* It may be possible to clean this up somewhat once the following PR is hopefully merged:
|
|
||||||
* https://github.com/FasterXML/jackson-core/pull/611
|
|
||||||
*
|
|
||||||
* See TolerantJsonParser
|
|
||||||
*/
|
|
||||||
if ("decimal".equals(myTypeName)) {
|
if ("decimal".equals(myTypeName)) {
|
||||||
if (value != null)
|
if (value != null) {
|
||||||
|
// remove leading plus sign from decimal value
|
||||||
|
if (value.startsWith("+")) {
|
||||||
|
value = value.substring(1);
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* It may be possible to clean this up somewhat once the following PR is hopefully merged:
|
||||||
|
* https://github.com/FasterXML/jackson-core/pull/611
|
||||||
|
*
|
||||||
|
* See TolerantJsonParser
|
||||||
|
*/
|
||||||
if (value.startsWith(".") && NumberUtils.isDigits(value.substring(1))) {
|
if (value.startsWith(".") && NumberUtils.isDigits(value.substring(1))) {
|
||||||
value = "0" + value;
|
value = "0" + value;
|
||||||
} else {
|
} else {
|
||||||
|
@ -1428,6 +1432,7 @@ class ParserState<T> {
|
||||||
value = value.substring(1);
|
value = value.substring(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
|
@ -30,6 +30,7 @@ import com.fasterxml.jackson.core.JsonGenerator;
|
||||||
import com.fasterxml.jackson.core.JsonParser;
|
import com.fasterxml.jackson.core.JsonParser;
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import com.fasterxml.jackson.core.StreamReadConstraints;
|
import com.fasterxml.jackson.core.StreamReadConstraints;
|
||||||
|
import com.fasterxml.jackson.core.json.JsonReadFeature;
|
||||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||||
import com.fasterxml.jackson.databind.JsonNode;
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
@ -406,7 +407,9 @@ public class JacksonStructure implements JsonLikeStructure {
|
||||||
}
|
}
|
||||||
|
|
||||||
private static ObjectMapper createObjectMapper() {
|
private static ObjectMapper createObjectMapper() {
|
||||||
ObjectMapper retVal = JsonMapper.builder().build();
|
ObjectMapper retVal = JsonMapper.builder()
|
||||||
|
.enable(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS)
|
||||||
|
.build();
|
||||||
retVal = retVal.setNodeFactory(new JsonNodeFactory(true));
|
retVal = retVal.setNodeFactory(new JsonNodeFactory(true));
|
||||||
retVal = retVal.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);
|
retVal = retVal.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);
|
||||||
retVal = retVal.enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS);
|
retVal = retVal.enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS);
|
||||||
|
|
|
@ -25,6 +25,7 @@ import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
|
@ -232,6 +233,8 @@ public class Constants {
|
||||||
public static final String PARAMQUALIFIER_NICKNAME = ":nickname";
|
public static final String PARAMQUALIFIER_NICKNAME = ":nickname";
|
||||||
public static final String PARAMQUALIFIER_TOKEN_OF_TYPE = ":of-type";
|
public static final String PARAMQUALIFIER_TOKEN_OF_TYPE = ":of-type";
|
||||||
public static final String PARAMQUALIFIER_TOKEN_NOT = ":not";
|
public static final String PARAMQUALIFIER_TOKEN_NOT = ":not";
|
||||||
|
public static final String PARAMQUALIFIER_TOKEN_IDENTIFIER = ":identifier";
|
||||||
|
|
||||||
public static final int STATUS_HTTP_200_OK = 200;
|
public static final int STATUS_HTTP_200_OK = 200;
|
||||||
public static final int STATUS_HTTP_201_CREATED = 201;
|
public static final int STATUS_HTTP_201_CREATED = 201;
|
||||||
public static final int STATUS_HTTP_204_NO_CONTENT = 204;
|
public static final int STATUS_HTTP_204_NO_CONTENT = 204;
|
||||||
|
@ -314,6 +317,17 @@ public class Constants {
|
||||||
public static final String PARAMQUALIFIER_TOKEN_NOT_IN = ":not-in";
|
public static final String PARAMQUALIFIER_TOKEN_NOT_IN = ":not-in";
|
||||||
public static final String PARAMQUALIFIER_TOKEN_ABOVE = ":above";
|
public static final String PARAMQUALIFIER_TOKEN_ABOVE = ":above";
|
||||||
public static final String PARAMQUALIFIER_TOKEN_BELOW = ":below";
|
public static final String PARAMQUALIFIER_TOKEN_BELOW = ":below";
|
||||||
|
|
||||||
|
public static final List<String> VALID_MODIFIERS = Collections.unmodifiableList(Arrays.asList(
|
||||||
|
PARAMQUALIFIER_STRING_CONTAINS,
|
||||||
|
PARAMQUALIFIER_STRING_EXACT,
|
||||||
|
PARAMQUALIFIER_TOKEN_IN,
|
||||||
|
PARAM_INCLUDE_QUALIFIER_ITERATE,
|
||||||
|
PARAMQUALIFIER_MISSING,
|
||||||
|
PARAMQUALIFIER_TOKEN_NOT_IN,
|
||||||
|
PARAMQUALIFIER_TOKEN_OF_TYPE,
|
||||||
|
PARAM_INCLUDE_QUALIFIER_RECURSE,
|
||||||
|
PARAMQUALIFIER_TOKEN_TEXT));
|
||||||
/**
|
/**
|
||||||
* The number of characters in a UUID (36)
|
* The number of characters in a UUID (36)
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
|
||||||
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
|
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||||
|
import ca.uhn.fhir.model.primitive.CodeDt;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
import org.hl7.fhir.instance.model.api.IBase;
|
import org.hl7.fhir.instance.model.api.IBase;
|
||||||
import org.hl7.fhir.instance.model.api.ICompositeType;
|
import org.hl7.fhir.instance.model.api.ICompositeType;
|
||||||
|
@ -41,8 +42,8 @@ public class AttachmentUtil {
|
||||||
return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
|
return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
|
||||||
}
|
}
|
||||||
|
|
||||||
public static IPrimitiveType<String> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
|
public static IPrimitiveType<CodeDt> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
|
||||||
return getOrCreateChild(theContext, theAttachment, "contentType", "string");
|
return getOrCreateChild(theContext, theAttachment, "contentType", "code");
|
||||||
}
|
}
|
||||||
|
|
||||||
public static IPrimitiveType<String> getOrCreateUrl(FhirContext theContext, ICompositeType theAttachment) {
|
public static IPrimitiveType<String> getOrCreateUrl(FhirContext theContext, ICompositeType theAttachment) {
|
||||||
|
|
|
@ -251,10 +251,22 @@ public class BundleBuilder {
|
||||||
* @param theResource The resource to create
|
* @param theResource The resource to create
|
||||||
*/
|
*/
|
||||||
public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
|
public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
|
||||||
|
return addTransactionCreateEntry(theResource, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds an entry containing an create (POST) request.
|
||||||
|
* Also sets the Bundle.type value to "transaction" if it is not already set.
|
||||||
|
*
|
||||||
|
* @param theResource The resource to create
|
||||||
|
* @param theFullUrl The fullUrl to attach to the entry. If null, will default to the resource ID.
|
||||||
|
*/
|
||||||
|
public CreateBuilder addTransactionCreateEntry(IBaseResource theResource, @Nullable String theFullUrl) {
|
||||||
setBundleField("type", "transaction");
|
setBundleField("type", "transaction");
|
||||||
|
|
||||||
IBase request =
|
IBase request = addEntryAndReturnRequest(
|
||||||
addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
|
theResource,
|
||||||
|
theFullUrl != null ? theFullUrl : theResource.getIdElement().getValue());
|
||||||
|
|
||||||
String resourceType = myContext.getResourceType(theResource);
|
String resourceType = myContext.getResourceType(theResource);
|
||||||
|
|
||||||
|
@ -423,7 +435,7 @@ public class BundleBuilder {
|
||||||
*/
|
*/
|
||||||
public void addCollectionEntry(IBaseResource theResource) {
|
public void addCollectionEntry(IBaseResource theResource) {
|
||||||
setType("collection");
|
setType("collection");
|
||||||
addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
|
addEntryAndReturnRequest(theResource);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -431,7 +443,7 @@ public class BundleBuilder {
|
||||||
*/
|
*/
|
||||||
public void addDocumentEntry(IBaseResource theResource) {
|
public void addDocumentEntry(IBaseResource theResource) {
|
||||||
setType("document");
|
setType("document");
|
||||||
addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
|
addEntryAndReturnRequest(theResource);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -463,6 +475,14 @@ public class BundleBuilder {
|
||||||
return (IBaseBackboneElement) searchInstance;
|
return (IBaseBackboneElement) searchInstance;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private IBase addEntryAndReturnRequest(IBaseResource theResource) {
|
||||||
|
IIdType id = theResource.getIdElement();
|
||||||
|
if (id.hasVersionIdPart()) {
|
||||||
|
id = id.toVersionless();
|
||||||
|
}
|
||||||
|
return addEntryAndReturnRequest(theResource, id.getValue());
|
||||||
|
}
|
||||||
|
|
||||||
private IBase addEntryAndReturnRequest(IBaseResource theResource, String theFullUrl) {
|
private IBase addEntryAndReturnRequest(IBaseResource theResource, String theFullUrl) {
|
||||||
Validate.notNull(theResource, "theResource must not be null");
|
Validate.notNull(theResource, "theResource must not be null");
|
||||||
|
|
||||||
|
|
|
@ -26,6 +26,7 @@ import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.model.primitive.IdDt;
|
import ca.uhn.fhir.model.primitive.IdDt;
|
||||||
|
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
|
||||||
import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
||||||
import ca.uhn.fhir.rest.api.RequestTypeEnum;
|
import ca.uhn.fhir.rest.api.RequestTypeEnum;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||||
|
@ -235,6 +236,14 @@ public class BundleUtil {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static BundleTypeEnum getBundleTypeEnum(FhirContext theContext, IBaseBundle theBundle) {
|
||||||
|
String bundleTypeCode = BundleUtil.getBundleType(theContext, theBundle);
|
||||||
|
if (isBlank(bundleTypeCode)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return BundleTypeEnum.forCode(bundleTypeCode);
|
||||||
|
}
|
||||||
|
|
||||||
public static void setBundleType(FhirContext theContext, IBaseBundle theBundle, String theType) {
|
public static void setBundleType(FhirContext theContext, IBaseBundle theBundle, String theType) {
|
||||||
RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle);
|
RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle);
|
||||||
BaseRuntimeChildDefinition entryChild = def.getChildByName("type");
|
BaseRuntimeChildDefinition entryChild = def.getChildByName("type");
|
||||||
|
|
|
@ -21,15 +21,23 @@ package ca.uhn.fhir.util;
|
||||||
|
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.model.api.IModelJson;
|
import ca.uhn.fhir.model.api.IModelJson;
|
||||||
|
import ca.uhn.fhir.model.api.annotation.SensitiveNoDisplay;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.core.JsonGenerator;
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||||
|
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||||
|
import com.fasterxml.jackson.databind.ser.FilterProvider;
|
||||||
|
import com.fasterxml.jackson.databind.ser.PropertyWriter;
|
||||||
|
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
|
||||||
|
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
import java.io.StringWriter;
|
import java.io.StringWriter;
|
||||||
import java.io.Writer;
|
import java.io.Writer;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -38,15 +46,30 @@ public class JsonUtil {
|
||||||
|
|
||||||
private static final ObjectMapper ourMapperPrettyPrint;
|
private static final ObjectMapper ourMapperPrettyPrint;
|
||||||
private static final ObjectMapper ourMapperNonPrettyPrint;
|
private static final ObjectMapper ourMapperNonPrettyPrint;
|
||||||
|
private static final ObjectMapper ourMapperIncludeSensitive;
|
||||||
|
|
||||||
|
public static final SimpleBeanPropertyFilter SIMPLE_BEAN_PROPERTY_FILTER = new SensitiveDataFilter();
|
||||||
|
|
||||||
|
public static final SimpleFilterProvider SENSITIVE_DATA_FILTER_PROVIDER =
|
||||||
|
new SimpleFilterProvider().addFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME, SIMPLE_BEAN_PROPERTY_FILTER);
|
||||||
|
public static final SimpleFilterProvider SHOW_ALL_DATA_FILTER_PROVIDER = new SimpleFilterProvider()
|
||||||
|
.addFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME, SimpleBeanPropertyFilter.serializeAll());
|
||||||
|
|
||||||
static {
|
static {
|
||||||
ourMapperPrettyPrint = new ObjectMapper();
|
ourMapperPrettyPrint = new ObjectMapper();
|
||||||
ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||||
|
ourMapperPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
|
||||||
ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);
|
ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);
|
||||||
|
|
||||||
ourMapperNonPrettyPrint = new ObjectMapper();
|
ourMapperNonPrettyPrint = new ObjectMapper();
|
||||||
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||||
|
ourMapperNonPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
|
||||||
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
||||||
|
|
||||||
|
ourMapperIncludeSensitive = new ObjectMapper();
|
||||||
|
ourMapperIncludeSensitive.setFilterProvider(SHOW_ALL_DATA_FILTER_PROVIDER);
|
||||||
|
ourMapperIncludeSensitive.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||||
|
ourMapperIncludeSensitive.disable(SerializationFeature.INDENT_OUTPUT);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -67,6 +90,24 @@ public class JsonUtil {
|
||||||
public static <T> List<T> deserializeList(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
|
public static <T> List<T> deserializeList(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
|
||||||
return ourMapperPrettyPrint.readerForListOf(theType).readValue(theInput);
|
return ourMapperPrettyPrint.readerForListOf(theType).readValue(theInput);
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* Parse JSON
|
||||||
|
*/
|
||||||
|
public static <T> T deserialize(@Nonnull InputStream theInput, @Nonnull Class<T> theType) throws IOException {
|
||||||
|
return ourMapperPrettyPrint.readerFor(theType).readValue(theInput);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Includes fields which are annotated with {@link SensitiveNoDisplay}. Currently only meant to be used for serialization
|
||||||
|
* for batch job parameters.
|
||||||
|
*/
|
||||||
|
public static String serializeWithSensitiveData(@Nonnull IModelJson theInput) {
|
||||||
|
try {
|
||||||
|
return ourMapperIncludeSensitive.writeValueAsString(theInput);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new InvalidRequestException(Msg.code(2487) + "Failed to encode " + theInput.getClass(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encode JSON
|
* Encode JSON
|
||||||
|
@ -93,6 +134,10 @@ public class JsonUtil {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public FilterProvider getSensitiveDataFilterProvider() {
|
||||||
|
return SENSITIVE_DATA_FILTER_PROVIDER;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encode JSON
|
* Encode JSON
|
||||||
*/
|
*/
|
||||||
|
@ -111,4 +156,26 @@ public class JsonUtil {
|
||||||
throw new InvalidRequestException(Msg.code(1741) + "Failed to encode " + theJson.getClass(), e);
|
throw new InvalidRequestException(Msg.code(1741) + "Failed to encode " + theJson.getClass(), e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static class SensitiveDataFilter extends SimpleBeanPropertyFilter {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected boolean include(PropertyWriter writer) {
|
||||||
|
return true; // Default include all except explicitly checked and excluded
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void serializeAsField(Object pojo, JsonGenerator gen, SerializerProvider provider, PropertyWriter writer)
|
||||||
|
throws Exception {
|
||||||
|
if (include(writer)) {
|
||||||
|
if (!isFieldSensitive(writer)) {
|
||||||
|
super.serializeAsField(pojo, gen, provider, writer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean isFieldSensitive(PropertyWriter writer) {
|
||||||
|
return writer.getAnnotation(SensitiveNoDisplay.class) != null;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.util;
|
||||||
|
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||||
|
|
||||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||||
|
@ -82,6 +83,12 @@ public class ValidateUtil {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static void isTrueOrThrowResourceNotFound(boolean theSuccess, String theMessage, Object... theValues) {
|
||||||
|
if (!theSuccess) {
|
||||||
|
throw new ResourceNotFoundException(Msg.code(2494) + String.format(theMessage, theValues));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public static void exactlyOneNotNullOrThrowInvalidRequestException(Object[] theObjects, String theMessage) {
|
public static void exactlyOneNotNullOrThrowInvalidRequestException(Object[] theObjects, String theMessage) {
|
||||||
int count = 0;
|
int count = 0;
|
||||||
for (Object next : theObjects) {
|
for (Object next : theObjects) {
|
||||||
|
|
|
@ -135,6 +135,8 @@ public enum VersionEnum {
|
||||||
V6_11_0,
|
V6_11_0,
|
||||||
|
|
||||||
V7_0_0,
|
V7_0_0,
|
||||||
|
V7_0_1,
|
||||||
|
|
||||||
V7_1_0,
|
V7_1_0,
|
||||||
V7_2_0;
|
V7_2_0;
|
||||||
|
|
||||||
|
|
|
@ -89,6 +89,7 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid m
|
||||||
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
|
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
|
||||||
ca.uhn.fhir.jpa.dao.BaseStorageDao.inlineMatchNotSupported=Inline match URLs are not supported on this server. Cannot process reference: "{0}"
|
ca.uhn.fhir.jpa.dao.BaseStorageDao.inlineMatchNotSupported=Inline match URLs are not supported on this server. Cannot process reference: "{0}"
|
||||||
ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
|
ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
|
||||||
|
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteByUrlThresholdExceeded=Failed to DELETE resources with match URL "{0}" because the resolved number of resources: {1} exceeds the threshold of {2}
|
||||||
ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithIdNotMatchFailure=Failed to {0} resource with match URL "{1}" because the matching resource does not match the provided ID
|
ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithIdNotMatchFailure=Failed to {0} resource with match URL "{1}" because the matching resource does not match the provided ID
|
||||||
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
|
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
|
||||||
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
|
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
|
||||||
|
@ -210,3 +211,5 @@ ca.uhn.fhir.jpa.provider.DiffProvider.cantDiffDifferentTypes=Unable to diff two
|
||||||
|
|
||||||
ca.uhn.fhir.jpa.interceptor.validation.RuleRequireProfileDeclaration.noMatchingProfile=Resource of type "{0}" does not declare conformance to profile from: {1}
|
ca.uhn.fhir.jpa.interceptor.validation.RuleRequireProfileDeclaration.noMatchingProfile=Resource of type "{0}" does not declare conformance to profile from: {1}
|
||||||
ca.uhn.fhir.jpa.interceptor.validation.RuleRequireProfileDeclaration.illegalProfile=Resource of type "{0}" must not declare conformance to profile: {1}
|
ca.uhn.fhir.jpa.interceptor.validation.RuleRequireProfileDeclaration.illegalProfile=Resource of type "{0}" must not declare conformance to profile: {1}
|
||||||
|
|
||||||
|
ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl.invalidUseOfSearchIdentifier=Unsupported search modifier(s): "{0}" for resource type "{1}". Valid search modifiers are: {2}
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
package ca.uhn.fhir.util;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.model.api.IModelJson;
|
||||||
|
import ca.uhn.fhir.model.api.annotation.SensitiveNoDisplay;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonFilter;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import static org.hamcrest.CoreMatchers.containsString;
|
||||||
|
import static org.hamcrest.CoreMatchers.is;
|
||||||
|
import static org.hamcrest.CoreMatchers.not;
|
||||||
|
import static org.hamcrest.MatcherAssert.assertThat;
|
||||||
|
|
||||||
|
class JsonUtilTest {
|
||||||
|
|
||||||
|
@JsonFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME)
|
||||||
|
class TestObject implements IModelJson {
|
||||||
|
@JsonProperty("sensitiveField")
|
||||||
|
@SensitiveNoDisplay
|
||||||
|
private String mySensitiveField;
|
||||||
|
|
||||||
|
@JsonProperty(value = "publicField")
|
||||||
|
private String myPublicField;
|
||||||
|
|
||||||
|
public String getPrivateField() {
|
||||||
|
return mySensitiveField;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setSensitiveField(String thePrivateField) {
|
||||||
|
this.mySensitiveField = thePrivateField;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getPublicField() {
|
||||||
|
return myPublicField;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setPublicField(String thePublicField) {
|
||||||
|
this.myPublicField = thePublicField;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testSensitiveNoDisplayAnnotationIsHiddenFromBasicSerialization() {
|
||||||
|
TestObject object = new TestObject();
|
||||||
|
object.setPublicField("Public Value!");
|
||||||
|
object.setSensitiveField("Sensitive Value!");
|
||||||
|
|
||||||
|
String sensitiveExcluded = JsonUtil.serializeOrInvalidRequest(object);
|
||||||
|
assertThat(sensitiveExcluded, is(not(containsString("Sensitive Value!"))));
|
||||||
|
|
||||||
|
String sensitiveIncluded = JsonUtil.serializeWithSensitiveData(object);
|
||||||
|
assertThat(sensitiveIncluded, is(containsString("Sensitive Value!")));
|
||||||
|
}
|
||||||
|
}
|
|
@ -4,6 +4,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
import static org.junit.jupiter.api.Assertions.fail;
|
import static org.junit.jupiter.api.Assertions.fail;
|
||||||
|
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
@ -12,7 +13,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
public class ValidateUtilTest {
|
public class ValidateUtilTest {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testValidate() {
|
public void testIsTrueOrThrowInvalidRequest() {
|
||||||
ValidateUtil.isTrueOrThrowInvalidRequest(true, "");
|
ValidateUtil.isTrueOrThrowInvalidRequest(true, "");
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -23,6 +24,18 @@ public class ValidateUtilTest {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testIsTrueOrThrowResourceNotFound() {
|
||||||
|
ValidateUtil.isTrueOrThrowResourceNotFound(true, "");
|
||||||
|
|
||||||
|
try {
|
||||||
|
ValidateUtil.isTrueOrThrowResourceNotFound(false, "The message");
|
||||||
|
fail();
|
||||||
|
} catch (ResourceNotFoundException e) {
|
||||||
|
assertEquals(Msg.code(2494) + "The message", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testIsGreaterThan() {
|
public void testIsGreaterThan() {
|
||||||
ValidateUtil.isGreaterThan(2L, 1L, "");
|
ValidateUtil.isGreaterThan(2L, 1L, "");
|
||||||
|
|
|
@ -29,7 +29,7 @@ public class VersionEnumTest {
|
||||||
int minor = Integer.parseInt(parts[1]);
|
int minor = Integer.parseInt(parts[1]);
|
||||||
int patch = Integer.parseInt(parts[2]);
|
int patch = Integer.parseInt(parts[2]);
|
||||||
|
|
||||||
if (major >= 6 && minor >= 3) {
|
if ((major == 6 && minor >= 3) || (major >= 7)) {
|
||||||
if (minor % 2 == 1) {
|
if (minor % 2 == 1) {
|
||||||
patch = 0;
|
patch = 0;
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<modelVersion>4.0.0</modelVersion>
|
<modelVersion>4.0.0</modelVersion>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir-bom</artifactId>
|
<artifactId>hapi-fhir-bom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<packaging>pom</packaging>
|
<packaging>pom</packaging>
|
||||||
<name>HAPI FHIR BOM</name>
|
<name>HAPI FHIR BOM</name>
|
||||||
|
@ -12,7 +12,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir</artifactId>
|
<artifactId>hapi-fhir</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../pom.xml</relativePath>
|
<relativePath>../pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -7,6 +7,7 @@ import ca.uhn.fhir.batch2.model.JobInstance;
|
||||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
||||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
@ -106,6 +107,7 @@ public class BulkImportCommandTest {
|
||||||
writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
|
writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
|
||||||
writeNdJsonFileToTempDirectory(fileContents2, "file2.json");
|
writeNdJsonFileToTempDirectory(fileContents2, "file2.json");
|
||||||
|
|
||||||
|
when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(any(), any())).thenReturn(RequestPartitionId.allPartitions());
|
||||||
when(myJobCoordinator.startInstance(any(), any())).thenReturn(createJobStartResponse("THE-JOB-ID"));
|
when(myJobCoordinator.startInstance(any(), any())).thenReturn(createJobStartResponse("THE-JOB-ID"));
|
||||||
|
|
||||||
// Start the command in a separate thread
|
// Start the command in a separate thread
|
||||||
|
@ -149,6 +151,7 @@ public class BulkImportCommandTest {
|
||||||
|
|
||||||
when(myJobCoordinator.startInstance(any(), any()))
|
when(myJobCoordinator.startInstance(any(), any()))
|
||||||
.thenReturn(createJobStartResponse("THE-JOB-ID"));
|
.thenReturn(createJobStartResponse("THE-JOB-ID"));
|
||||||
|
when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(any(), any())).thenReturn(RequestPartitionId.allPartitions());
|
||||||
|
|
||||||
// Start the command in a separate thread
|
// Start the command in a separate thread
|
||||||
new Thread(() -> App.main(new String[]{
|
new Thread(() -> App.main(new String[]{
|
||||||
|
@ -189,6 +192,7 @@ public class BulkImportCommandTest {
|
||||||
writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
|
writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
|
||||||
writeNdJsonFileToTempDirectory(fileContents2, "file2.json");
|
writeNdJsonFileToTempDirectory(fileContents2, "file2.json");
|
||||||
|
|
||||||
|
when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(any(), any())).thenReturn(RequestPartitionId.allPartitions());
|
||||||
when(myJobCoordinator.startInstance(any(), any())).thenReturn(createJobStartResponse("THE-JOB-ID"));
|
when(myJobCoordinator.startInstance(any(), any())).thenReturn(createJobStartResponse("THE-JOB-ID"));
|
||||||
|
|
||||||
// Start the command in a separate thread
|
// Start the command in a separate thread
|
||||||
|
|
|
@ -6,7 +6,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir-cli</artifactId>
|
<artifactId>hapi-fhir-cli</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../pom.xml</relativePath>
|
<relativePath>../pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir</artifactId>
|
<artifactId>hapi-fhir</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../pom.xml</relativePath>
|
<relativePath>../pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-fhir</artifactId>
|
<artifactId>hapi-fhir</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../pom.xml</relativePath>
|
<relativePath>../pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5537
|
||||||
|
title: "Calling the method getOrCreateContentType in AttachmentUtil on an attachment with no content type would throw exception because contentType is a code not a string.
|
||||||
|
This fixes the function to create an empty code as expected"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5547
|
||||||
|
title: "The addition of the indexes `idx_sp_uri_hash_identity_pattern_ops` and `idx_sp_string_hash_nrm_pattern_ops` could occasionally timeout during migration in Postgresql on large databases, leaving the migration table in a failed state, and Smile CDR unable to boot.
|
||||||
|
Now existence of the index is checked before attempting to add it again."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5603
|
||||||
|
jira: SMILE-8000
|
||||||
|
title: "Previously, the semantics of `is-a` were incorrect in Valueset Expansion. The implementation previously used the behaviour of `descendent-of`, which means that `A is-a A` was not being considered as true. This has been corrected. In addition,
|
||||||
|
`descendent-of` is now supported, which compares for strict descendency, and does not include itself. Thanks to Ole Hedegaard (@ohetrifork) for the fix."
|
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5619
|
||||||
|
jira: SMILE-7909
|
||||||
|
title: "Previously, when a transaction was posted with a resource that had placeholder references and auto versioning
|
||||||
|
references enabled for that path, if the target resource was included in the Bundle but not modified, the reference was
|
||||||
|
saved with a version number that didn't exist. This has been fixed."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5621
|
||||||
|
title: "Fixed a deadlock in resource conditional create."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5623
|
||||||
|
title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only
|
||||||
|
adding one condition to the underlying SQL query which resulted in incorrect search results. This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5626
|
||||||
|
title: "Previously, an exception could be thrown by the container when executing a contextClosedEvent on the
|
||||||
|
Scheduler Service. This issue has been fixed."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5632
|
||||||
|
title: "Previously bulk export operation was returning an empty response when no resources matched the request, which
|
||||||
|
didn't comply with [HL7 HAPI IG](https://hl7.org/fhir/uv/bulkdata/export/index.html#response---complete-status).
|
||||||
|
This has been corrected."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5633
|
||||||
|
title: "Smile failed to save resources running on Oracle when installed from 2023-02 or earlier.
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5634
|
||||||
|
title: "Previously, expanding a 'ValueSet' with no concepts based on system `urn:ietf:bcp:13` would fail with
|
||||||
|
`ExpansionCouldNotBeCompletedInternallyException`. This has been fixed."
|
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5636
|
||||||
|
jira: SMILE-7648
|
||||||
|
title: "Previously, the number of threads allocated to the $expunge operation in certain cases could be more
|
||||||
|
than configured, this would cause hundreds of threads to be created and all available database connections
|
||||||
|
to be consumed. This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5640
|
||||||
|
jira: SMILE-7977
|
||||||
|
title: "Clinical reasoning version bump to address reported 'null pointer' error that is encountered when running $evaluate-measure against a measure with an omitted measure.group.population.id"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5642
|
||||||
|
title: "A non-superuser with correct permissions encounters HAPI-0339 when POSTING a transaction Bundle with a PATCH.
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,8 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5644
|
||||||
|
title: "Previously, searching for `Bundle` resources with read all `Bundle` resources permissions, returned an
|
||||||
|
HTTP 403 Forbidden error. This was because the `AuthorizationInterceptor` applied permissions to the resources inside
|
||||||
|
the `Bundle`, instead of the `Bundle` itself. This has been fixed and permissions are no longer applied to the resources
|
||||||
|
inside a `Bundle` of type `document`, `message`, or `collection` for `Bundle` requests."
|
||||||
|
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5649
|
||||||
|
title: "Change database upgrade script to avoid holding locks while adding indices."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5651
|
||||||
|
jira: SMILE-7855
|
||||||
|
title: "Previously, conditional creates would fail with HAPI-0929 errors if there was no preceding '?'.
|
||||||
|
This has been fixed."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
jira: SMILE-7216
|
||||||
|
title: "Previously, the Bulk Import (`$import`) job was ignoring the `httpBasicCredentials` section of the incoming parameters
|
||||||
|
object, causing the job to fail with a 403 error. This has been corrected."
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5659
|
||||||
|
title: "Previously, after registering built-in interceptor `PatientIdPartitionInterceptor`, the system bulk export
|
||||||
|
(with no filters) operation would fail with a NullPointerException. This has been fixed."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5110
|
||||||
|
title: "When processing a FHIR transaction in the JPA server, an identifier containing a
|
||||||
|
system that has no value but has an extension present could cause a NullPointerException.
|
||||||
|
This has been corrected."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5667
|
||||||
|
title: "Previously, creating an XML encoded FHIR resource with a decimal element that has a leading plus sign value
|
||||||
|
would result in `JsonParseException` during the read operation from the database. Thus, making it impossible to
|
||||||
|
retrieve or modify such resources. This has been fixed."
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5668
|
||||||
|
title: "Added support for sorting on a chained `location.near` search. This allows you to sort location by nearness via a chained search. Thanks to Nicolai Gjøderum (@nigtrifork) for the contribution!"
|
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5671
|
||||||
|
title: "Avoid lock contention by refreshing SearchParameter cache in a new transaction."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5672
|
||||||
|
title: "Previously, when performing a FHIR search using a non-chained relative reference (returns entire resource) with
|
||||||
|
a server assigned id, it ignores the invalid resourceType in the parameter value and proceeds with the id based lookup. e.g.
|
||||||
|
GET `/MedicationAdministration?context=abc/1352` returns `Encounter/1352`. This has been fixed."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5682
|
||||||
|
title: "The BundleBuilder utility class will no longer include the `/_version/xxx` portion of the
|
||||||
|
resource ID in the `Bundle.entry.fullUrl` it generates, as the FHIR specification states that this
|
||||||
|
should be omitted."
|
|
@ -0,0 +1,9 @@
|
||||||
|
---
|
||||||
|
type: change
|
||||||
|
issue: 5682
|
||||||
|
title: "The IPS $summary generation API has been overhauled to make it more flexible for
|
||||||
|
future use cases. Specifically, the section registry has been removed and folded into
|
||||||
|
the generation strategy, and support has been added for non-JPA sources of data. This is
|
||||||
|
a breaking change to the API, and implementers will need to update their code. This updated
|
||||||
|
API incorporates community feedback, and should now be considered a stable API for IPS
|
||||||
|
generation."
|
|
@ -0,0 +1,26 @@
|
||||||
|
---
|
||||||
|
type: add
|
||||||
|
issue: 5682
|
||||||
|
title: "Several enhancements have been made to the International Patient Summary generator based on
|
||||||
|
feedback from implementers:
|
||||||
|
<ul>
|
||||||
|
<li>
|
||||||
|
New methods have been added to the <code>IIpsGenerationStrategy</code> allowing resources
|
||||||
|
for any or all sections to be fetched from a source other than the FHIR repository.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
The <code>IpsSectionEnum</code> class has been removed and replaced in any user-facing APIs
|
||||||
|
with references to <code>SectionRegistry.Section</code>. This makes it much easier to
|
||||||
|
extend or replace the section registry with custom sections not defined in the universal
|
||||||
|
IPS implementation guide.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
Captions have been removed from narrative section tables, and replaced with H5 tags
|
||||||
|
directly above the table. This results in an easier to read display since the table
|
||||||
|
title will appear above the table instead of below it.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
The IPS narrative generator built in templates will now omit tables when the template
|
||||||
|
specified multiple tables and the specific table would have no resources.
|
||||||
|
</li>
|
||||||
|
</ul>"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5682
|
||||||
|
title: "The IPS Generator will no longer replace resource IDs with placeholder IDs in the resulting
|
||||||
|
bundle by default, although this can be overridden in the generation strategy object."
|
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5690
|
||||||
|
title: "Previously, a DELETE on a specific URL search string would always attempt to delete no matter the number of
|
||||||
|
resolved resources.
|
||||||
|
This has been fixed by adding a storage setting to enforce a threshold for resolved resources, above which
|
||||||
|
the DELETE operation will fail to execute with HAPI-2496."
|
|
@ -0,0 +1,6 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5701
|
||||||
|
title: "Previously, invoking search URLs containing ':identifier' would result in a HAPI-1250 error complaining about
|
||||||
|
an invalid resource type.
|
||||||
|
This has been fixed by returning a clearer error message for this specific condition: HAPI-2498."
|
|
@ -0,0 +1,8 @@
|
||||||
|
---
|
||||||
|
type: fix
|
||||||
|
issue: 5707
|
||||||
|
jira: SMILE-7270
|
||||||
|
title: "Previously, with validation active, when a user POSTed a resource with a meta profile with a non-existent
|
||||||
|
StructureDefinition URL, then POSTed the StructureDefinition, POSTing the same or another patient with that same
|
||||||
|
meta profile URL would still fail with a VALIDATION_VAL_PROFILE_UNKNOWN_NOT_POLICY validation error.
|
||||||
|
This has been fixed."
|
|
@ -14,24 +14,17 @@ The IPS Generator uses FHIR resources stored in your repository as its input. Th
|
||||||
|
|
||||||
# Generation Strategy
|
# Generation Strategy
|
||||||
|
|
||||||
A user supplied strategy class is used to determine various properties of the IPS. This class must implement the `IIpsGenerationStrategy` interface. A default implementation called `DefaultIpsGenerationStrategy` is included. You may use this default implementation, use a subclassed version of it that adds additional logic, or use en entirely new implementation.
|
A user supplied strategy class is used to determine various properties of the IPS. This class must implement the `IIpsGenerationStrategy` interface. A default implementation called `DefaultJpaIpsGenerationStrategy` is included. You may use this default implementation, use a subclassed version of it that adds additional logic, or use en entirely new implementation.
|
||||||
|
|
||||||
The generation strategy also supplies the [Section Registry](#section-registry) and [Narrative Templates](#narrative-templates) implementations, so it can be considered the central part of your IPS configuration.
|
The generation strategy also supplies the [Narrative Templates](#narrative-templates) implementations, so it can be considered the central part of your IPS configuration.
|
||||||
|
|
||||||
* JavaDoc: [IIpsGenerationStrategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-ips/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.html)
|
* JavaDoc: [IIpsGenerationStrategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-ips/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.html)
|
||||||
* Source Code: [IIpsGenerationStrategy.java](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java)
|
* Source Code: [IIpsGenerationStrategy.java](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java)
|
||||||
* JavaDoc: [DefaultIpsGenerationStrategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-ips/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.html)
|
|
||||||
* Source Code: [DefaultIpsGenerationStrategy.java](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.java)
|
|
||||||
|
|
||||||
|
The default generation strategy defines the sections that will be included in your IPS. Out of the box, the standard IPS sections are all included. See the [IG homepage](http://hl7.org/fhir/uv/ips/) for a list of the standard sections.
|
||||||
|
|
||||||
<a name="section-registry"/>
|
* JavaDoc: [DefaultJpaIpsGenerationStrategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-ips/ca/uhn/fhir/jpa/ips/jpa/DefaultJpaIpsGenerationStrategy.html)
|
||||||
|
* Source Code: [DefaultJpaIpsGenerationStrategy.java](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/jpa/DefaultJpaIpsGenerationStrategy.java)
|
||||||
# Section Registry
|
|
||||||
|
|
||||||
The IPS SectionRegistry class defines the sections that will be included in your IPS. Out of the box, the standard IPS sections are all included. See the [IG homepage](http://hl7.org/fhir/uv/ips/) for a list of the standard sections.
|
|
||||||
|
|
||||||
* JavaDoc: [SectionRegistry](/hapi-fhir/apidocs/hapi-fhir-jpaserver-ips/ca/uhn/fhir/jpa/ips/api/SectionRegistry.html)
|
|
||||||
* Source Code: [SectionRegistry.java](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/SectionRegistry.java)
|
|
||||||
|
|
||||||
|
|
||||||
<a name="narrative-templates"/>
|
<a name="narrative-templates"/>
|
||||||
|
@ -44,7 +37,7 @@ The IPS generator uses HAPI FHIR [Narrative Generation](/hapi-fhir/docs/model/na
|
||||||
|
|
||||||
Narrative templates for individual sections will be supplied a Bundle resource containing only the matched resources for the individual section as entries (ie. the Composition itself will not be present and no other resources will be present). So, for example, when generating the _Allergies / Intolerances_ IPS section narrative, the input to the narrative generator will be a _Bundle_ resource containing only _AllergyIntolerance_ resources.
|
Narrative templates for individual sections will be supplied a Bundle resource containing only the matched resources for the individual section as entries (ie. the Composition itself will not be present and no other resources will be present). So, for example, when generating the _Allergies / Intolerances_ IPS section narrative, the input to the narrative generator will be a _Bundle_ resource containing only _AllergyIntolerance_ resources.
|
||||||
|
|
||||||
The narrative properties file should contain definitions using the profile URL of the individual section (as defined in the [section registry](#section-registry)) as the `.profile` qualifier. For example:
|
The narrative properties file should contain definitions using the profile URL of the individual section (as defined in the section definition within the generation strategy) as the `.profile` qualifier. For example:
|
||||||
|
|
||||||
```properties
|
```properties
|
||||||
ips-allergyintolerance.resourceType=Bundle
|
ips-allergyintolerance.resourceType=Bundle
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -28,13 +28,13 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
||||||
import ca.uhn.fhir.util.StopWatch;
|
import ca.uhn.fhir.util.StopWatch;
|
||||||
import com.google.common.annotations.VisibleForTesting;
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import jakarta.annotation.PostConstruct;
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import jakarta.annotation.PreDestroy;
|
||||||
import org.quartz.JobKey;
|
import org.quartz.JobKey;
|
||||||
import org.quartz.SchedulerException;
|
import org.quartz.SchedulerException;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.context.ApplicationContext;
|
import org.springframework.context.ApplicationContext;
|
||||||
import org.springframework.context.event.ContextClosedEvent;
|
|
||||||
import org.springframework.context.event.ContextRefreshedEvent;
|
import org.springframework.context.event.ContextRefreshedEvent;
|
||||||
import org.springframework.context.event.EventListener;
|
import org.springframework.context.event.EventListener;
|
||||||
import org.springframework.core.env.Environment;
|
import org.springframework.core.env.Environment;
|
||||||
|
@ -177,7 +177,7 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService {
|
||||||
values.forEach(t -> t.scheduleJobs(this));
|
values.forEach(t -> t.scheduleJobs(this));
|
||||||
}
|
}
|
||||||
|
|
||||||
@EventListener(ContextClosedEvent.class)
|
@PreDestroy
|
||||||
public void stop() {
|
public void stop() {
|
||||||
ourLog.info("Shutting down task scheduler...");
|
ourLog.info("Shutting down task scheduler...");
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -75,4 +75,8 @@ public class HibernatePropertiesProvider {
|
||||||
public DataSource getDataSource() {
|
public DataSource getDataSource() {
|
||||||
return myEntityManagerFactory.getDataSource();
|
return myEntityManagerFactory.getDataSource();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public boolean isOracleDialect() {
|
||||||
|
return getDialect() instanceof org.hibernate.dialect.OracleDialect;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
|
||||||
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
|
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
|
||||||
import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
|
import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
|
||||||
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
|
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
|
||||||
|
import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
|
||||||
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
|
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
|
||||||
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
|
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
|
||||||
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
|
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
|
||||||
|
@ -869,4 +870,10 @@ public class JpaConfig {
|
||||||
public IMetaTagSorter metaTagSorter() {
|
public IMetaTagSorter metaTagSorter() {
|
||||||
return new MetaTagSorterAlphabetical();
|
return new MetaTagSorterAlphabetical();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ResourceHistoryCalculator resourceHistoryCalculator(
|
||||||
|
FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
|
||||||
|
return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -85,7 +85,6 @@ import ca.uhn.fhir.model.api.TagList;
|
||||||
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
|
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
|
||||||
import ca.uhn.fhir.model.primitive.IdDt;
|
import ca.uhn.fhir.model.primitive.IdDt;
|
||||||
import ca.uhn.fhir.parser.DataFormatException;
|
import ca.uhn.fhir.parser.DataFormatException;
|
||||||
import ca.uhn.fhir.parser.IParser;
|
|
||||||
import ca.uhn.fhir.rest.api.Constants;
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||||
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
||||||
|
@ -105,8 +104,6 @@ import com.google.common.annotations.VisibleForTesting;
|
||||||
import com.google.common.base.Charsets;
|
import com.google.common.base.Charsets;
|
||||||
import com.google.common.collect.Sets;
|
import com.google.common.collect.Sets;
|
||||||
import com.google.common.hash.HashCode;
|
import com.google.common.hash.HashCode;
|
||||||
import com.google.common.hash.HashFunction;
|
|
||||||
import com.google.common.hash.Hashing;
|
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
import jakarta.annotation.Nullable;
|
import jakarta.annotation.Nullable;
|
||||||
import jakarta.annotation.PostConstruct;
|
import jakarta.annotation.PostConstruct;
|
||||||
|
@ -264,6 +261,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
@Autowired
|
@Autowired
|
||||||
private PlatformTransactionManager myTransactionManager;
|
private PlatformTransactionManager myTransactionManager;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
protected ResourceHistoryCalculator myResourceHistoryCalculator;
|
||||||
|
|
||||||
protected final CodingSpy myCodingSpy = new CodingSpy();
|
protected final CodingSpy myCodingSpy = new CodingSpy();
|
||||||
|
|
||||||
@VisibleForTesting
|
@VisibleForTesting
|
||||||
|
@ -277,6 +277,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
mySearchParamPresenceSvc = theSearchParamPresenceSvc;
|
mySearchParamPresenceSvc = theSearchParamPresenceSvc;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
public void setResourceHistoryCalculator(ResourceHistoryCalculator theResourceHistoryCalculator) {
|
||||||
|
myResourceHistoryCalculator = theResourceHistoryCalculator;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected IInterceptorBroadcaster getInterceptorBroadcaster() {
|
protected IInterceptorBroadcaster getInterceptorBroadcaster() {
|
||||||
return myInterceptorBroadcaster;
|
return myInterceptorBroadcaster;
|
||||||
|
@ -643,6 +648,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
theEntity.setResourceType(toResourceName(theResource));
|
theEntity.setResourceType(toResourceName(theResource));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
byte[] resourceBinary;
|
||||||
String resourceText;
|
String resourceText;
|
||||||
ResourceEncodingEnum encoding;
|
ResourceEncodingEnum encoding;
|
||||||
boolean changed = false;
|
boolean changed = false;
|
||||||
|
@ -659,6 +665,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
if (address != null) {
|
if (address != null) {
|
||||||
|
|
||||||
encoding = ResourceEncodingEnum.ESR;
|
encoding = ResourceEncodingEnum.ESR;
|
||||||
|
resourceBinary = null;
|
||||||
resourceText = address.getProviderId() + ":" + address.getLocation();
|
resourceText = address.getProviderId() + ":" + address.getLocation();
|
||||||
changed = true;
|
changed = true;
|
||||||
|
|
||||||
|
@ -675,10 +682,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
|
|
||||||
theEntity.setFhirVersion(myContext.getVersion().getVersion());
|
theEntity.setFhirVersion(myContext.getVersion().getVersion());
|
||||||
|
|
||||||
HashFunction sha256 = Hashing.sha256();
|
// TODO: LD: Once 2024-02 it out the door we should consider further refactoring here to move
|
||||||
resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
|
// more of this logic within the calculator and eliminate more local variables
|
||||||
encoding = ResourceEncodingEnum.JSON;
|
final ResourceHistoryState calculate = myResourceHistoryCalculator.calculateResourceHistoryState(
|
||||||
HashCode hashCode = sha256.hashUnencodedChars(resourceText);
|
theResource, encoding, excludeElements);
|
||||||
|
|
||||||
|
resourceText = calculate.getResourceText();
|
||||||
|
resourceBinary = calculate.getResourceBinary();
|
||||||
|
encoding = calculate.getEncoding(); // This may be a no-op
|
||||||
|
final HashCode hashCode = calculate.getHashCode();
|
||||||
|
|
||||||
String hashSha256 = hashCode.toString();
|
String hashSha256 = hashCode.toString();
|
||||||
if (!hashSha256.equals(theEntity.getHashSha256())) {
|
if (!hashSha256.equals(theEntity.getHashSha256())) {
|
||||||
|
@ -696,6 +708,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
} else {
|
} else {
|
||||||
|
|
||||||
encoding = null;
|
encoding = null;
|
||||||
|
resourceBinary = null;
|
||||||
resourceText = null;
|
resourceText = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -713,6 +726,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
changed = true;
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resourceBinary = null;
|
||||||
resourceText = null;
|
resourceText = null;
|
||||||
encoding = ResourceEncodingEnum.DEL;
|
encoding = ResourceEncodingEnum.DEL;
|
||||||
}
|
}
|
||||||
|
@ -737,13 +751,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
|
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
|
||||||
changed = true;
|
changed = true;
|
||||||
} else {
|
} else {
|
||||||
changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
|
// TODO: LD: Once 2024-02 it out the door we should consider further refactoring here to move
|
||||||
|
// more of this logic within the calculator and eliminate more local variables
|
||||||
|
changed = myResourceHistoryCalculator.isResourceHistoryChanged(
|
||||||
|
currentHistoryVersion, resourceBinary, resourceText);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
EncodedResource retVal = new EncodedResource();
|
EncodedResource retVal = new EncodedResource();
|
||||||
retVal.setEncoding(encoding);
|
retVal.setEncoding(encoding);
|
||||||
|
retVal.setResourceBinary(resourceBinary);
|
||||||
retVal.setResourceText(resourceText);
|
retVal.setResourceText(resourceText);
|
||||||
retVal.setChanged(changed);
|
retVal.setChanged(changed);
|
||||||
|
|
||||||
|
@ -1393,8 +1411,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
|
ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
|
||||||
List<String> excludeElements = new ArrayList<>(8);
|
List<String> excludeElements = new ArrayList<>(8);
|
||||||
getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
|
getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
|
||||||
String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
|
String encodedResourceString =
|
||||||
boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
|
myResourceHistoryCalculator.encodeResource(theResource, encoding, excludeElements);
|
||||||
|
byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(encoding, encodedResourceString);
|
||||||
|
final boolean changed = myResourceHistoryCalculator.isResourceHistoryChanged(
|
||||||
|
historyEntity, resourceBinary, encodedResourceString);
|
||||||
|
|
||||||
historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
|
historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
|
||||||
|
|
||||||
|
@ -1406,14 +1427,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
return historyEntity;
|
return historyEntity;
|
||||||
}
|
}
|
||||||
|
|
||||||
populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
|
myResourceHistoryCalculator.populateEncodedResource(
|
||||||
|
encodedResource, encodedResourceString, resourceBinary, encoding);
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Save the resource itself to the resourceHistoryTable
|
* Save the resource itself to the resourceHistoryTable
|
||||||
*/
|
*/
|
||||||
historyEntity = myEntityManager.merge(historyEntity);
|
historyEntity = myEntityManager.merge(historyEntity);
|
||||||
historyEntity.setEncoding(encodedResource.getEncoding());
|
historyEntity.setEncoding(encodedResource.getEncoding());
|
||||||
|
historyEntity.setResource(encodedResource.getResourceBinary());
|
||||||
historyEntity.setResourceTextVc(encodedResource.getResourceText());
|
historyEntity.setResourceTextVc(encodedResource.getResourceText());
|
||||||
myResourceHistoryTableDao.save(historyEntity);
|
myResourceHistoryTableDao.save(historyEntity);
|
||||||
|
|
||||||
|
@ -1423,8 +1445,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
}
|
}
|
||||||
|
|
||||||
private void populateEncodedResource(
|
private void populateEncodedResource(
|
||||||
EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
|
EncodedResource encodedResource,
|
||||||
|
String encodedResourceString,
|
||||||
|
byte[] theResourceBinary,
|
||||||
|
ResourceEncodingEnum theEncoding) {
|
||||||
encodedResource.setResourceText(encodedResourceString);
|
encodedResource.setResourceText(encodedResourceString);
|
||||||
|
encodedResource.setResourceBinary(theResourceBinary);
|
||||||
encodedResource.setEncoding(theEncoding);
|
encodedResource.setEncoding(theEncoding);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1489,6 +1515,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
}
|
}
|
||||||
|
|
||||||
historyEntry.setEncoding(theChanged.getEncoding());
|
historyEntry.setEncoding(theChanged.getEncoding());
|
||||||
|
historyEntry.setResource(theChanged.getResourceBinary());
|
||||||
historyEntry.setResourceTextVc(theChanged.getResourceText());
|
historyEntry.setResourceTextVc(theChanged.getResourceText());
|
||||||
|
|
||||||
ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
|
ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
|
||||||
|
@ -1926,16 +1953,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||||
return resourceText;
|
return resourceText;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static String encodeResource(
|
|
||||||
IBaseResource theResource,
|
|
||||||
ResourceEncodingEnum theEncoding,
|
|
||||||
List<String> theExcludeElements,
|
|
||||||
FhirContext theContext) {
|
|
||||||
IParser parser = theEncoding.newParser(theContext);
|
|
||||||
parser.setDontEncodeElements(theExcludeElements);
|
|
||||||
return parser.encodeResourceToString(theResource);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static String parseNarrativeTextIntoWords(IBaseResource theResource) {
|
private static String parseNarrativeTextIntoWords(IBaseResource theResource) {
|
||||||
|
|
||||||
StringBuilder b = new StringBuilder();
|
StringBuilder b = new StringBuilder();
|
||||||
|
|
|
@ -30,7 +30,6 @@ import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||||
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
|
|
||||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||||
|
@ -852,12 +851,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
return deleteExpunge(theUrl, theRequest);
|
return deleteExpunge(theUrl, theRequest);
|
||||||
}
|
}
|
||||||
|
|
||||||
return myTransactionService.execute(theRequest, transactionDetails, tx -> {
|
return myTransactionService
|
||||||
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
.withRequest(theRequest)
|
||||||
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest, transactionDetails);
|
.withTransactionDetails(transactionDetails)
|
||||||
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
|
.execute(tx -> {
|
||||||
return outcome;
|
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
||||||
});
|
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest, transactionDetails);
|
||||||
|
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
|
||||||
|
return outcome;
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -872,10 +874,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
@Nonnull TransactionDetails theTransactionDetails) {
|
@Nonnull TransactionDetails theTransactionDetails) {
|
||||||
validateDeleteEnabled();
|
validateDeleteEnabled();
|
||||||
|
|
||||||
return myTransactionService.execute(
|
return myTransactionService
|
||||||
theRequestDetails,
|
.withRequest(theRequestDetails)
|
||||||
theTransactionDetails,
|
.withTransactionDetails(theTransactionDetails)
|
||||||
tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails));
|
.execute(tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
|
@ -902,6 +904,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
theUrl,
|
theUrl,
|
||||||
resourceIds.size()));
|
resourceIds.size()));
|
||||||
}
|
}
|
||||||
|
// TODO: LD: There is a still a bug on slow deletes: https://github.com/hapifhir/hapi-fhir/issues/5675
|
||||||
|
final long threshold = getStorageSettings().getRestDeleteByUrlResourceIdThreshold();
|
||||||
|
if (resourceIds.size() > threshold) {
|
||||||
|
throw new PreconditionFailedException(Msg.code(2496)
|
||||||
|
+ getContext()
|
||||||
|
.getLocalizer()
|
||||||
|
.getMessageSanitized(
|
||||||
|
BaseStorageDao.class,
|
||||||
|
"deleteByUrlThresholdExceeded",
|
||||||
|
theUrl,
|
||||||
|
resourceIds.size(),
|
||||||
|
threshold));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequestDetails, theTransactionDetails);
|
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequestDetails, theTransactionDetails);
|
||||||
|
@ -1233,9 +1248,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
@Override
|
@Override
|
||||||
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
|
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
|
||||||
StopWatch w = new StopWatch();
|
StopWatch w = new StopWatch();
|
||||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, null);
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
|
myRequestPartitionHelperService.determineReadPartitionForRequestForHistory(
|
||||||
|
theRequestDetails, myResourceName, null);
|
||||||
IBundleProvider retVal = myTransactionService
|
IBundleProvider retVal = myTransactionService
|
||||||
.withRequest(theRequestDetails)
|
.withRequest(theRequestDetails)
|
||||||
.withRequestPartitionId(requestPartitionId)
|
.withRequestPartitionId(requestPartitionId)
|
||||||
|
@ -1254,9 +1269,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) {
|
final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) {
|
||||||
StopWatch w = new StopWatch();
|
StopWatch w = new StopWatch();
|
||||||
|
|
||||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId);
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details);
|
myRequestPartitionHelperService.determineReadPartitionForRequestForHistory(
|
||||||
|
theRequest, myResourceName, theId);
|
||||||
IBundleProvider retVal = myTransactionService
|
IBundleProvider retVal = myTransactionService
|
||||||
.withRequest(theRequest)
|
.withRequest(theRequest)
|
||||||
.withRequestPartitionId(requestPartitionId)
|
.withRequestPartitionId(requestPartitionId)
|
||||||
|
@ -1284,9 +1299,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
final HistorySearchDateRangeParam theHistorySearchDateRangeParam,
|
final HistorySearchDateRangeParam theHistorySearchDateRangeParam,
|
||||||
RequestDetails theRequest) {
|
RequestDetails theRequest) {
|
||||||
StopWatch w = new StopWatch();
|
StopWatch w = new StopWatch();
|
||||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId);
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details);
|
myRequestPartitionHelperService.determineReadPartitionForRequestForHistory(
|
||||||
|
theRequest, myResourceName, theId);
|
||||||
IBundleProvider retVal = myTransactionService
|
IBundleProvider retVal = myTransactionService
|
||||||
.withRequest(theRequest)
|
.withRequest(theRequest)
|
||||||
.withRequestPartitionId(requestPartitionId)
|
.withRequestPartitionId(requestPartitionId)
|
||||||
|
@ -1333,10 +1348,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
addAllResourcesTypesToReindex(theBase, theRequestDetails, params);
|
addAllResourcesTypesToReindex(theBase, theRequestDetails, params);
|
||||||
}
|
}
|
||||||
|
|
||||||
ReadPartitionIdRequestDetails details =
|
|
||||||
ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX);
|
|
||||||
RequestPartitionId requestPartition =
|
RequestPartitionId requestPartition =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
|
myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
|
||||||
|
theRequestDetails, ProviderConstants.OPERATION_REINDEX);
|
||||||
params.setRequestPartitionId(requestPartition);
|
params.setRequestPartitionId(requestPartition);
|
||||||
|
|
||||||
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
||||||
|
@ -1710,17 +1724,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|
||||||
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
|
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
|
||||||
byte[] resourceBytes = historyEntity.getResource();
|
byte[] resourceBytes = historyEntity.getResource();
|
||||||
|
|
||||||
// Always migrate data out of the bytes column
|
|
||||||
if (resourceBytes != null) {
|
if (resourceBytes != null) {
|
||||||
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
|
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
|
||||||
ourLog.debug(
|
if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
|
||||||
"Storing text of resource {} version {} as inline VARCHAR",
|
changed = true;
|
||||||
entity.getResourceId(),
|
}
|
||||||
historyEntity.getVersion());
|
|
||||||
historyEntity.setResourceTextVc(resourceText);
|
|
||||||
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
|
|
||||||
changed = true;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
|
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
|
||||||
|
@ -1966,7 +1974,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
||||||
theRequest, getResourceName(), theParams, null);
|
theRequest, getResourceName(), theParams);
|
||||||
IBundleProvider retVal = mySearchCoordinatorSvc.registerSearch(
|
IBundleProvider retVal = mySearchCoordinatorSvc.registerSearch(
|
||||||
this, theParams, getResourceName(), cacheControlDirective, theRequest, requestPartitionId);
|
this, theParams, getResourceName(), cacheControlDirective, theRequest, requestPartitionId);
|
||||||
|
|
||||||
|
@ -2135,7 +2143,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
BiFunction<RequestDetails, Stream<JpaPid>, Stream<V>> transform) {
|
BiFunction<RequestDetails, Stream<JpaPid>, Stream<V>> transform) {
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
||||||
theRequest, myResourceName, theParams, null);
|
theRequest, myResourceName, theParams);
|
||||||
|
|
||||||
String uuid = UUID.randomUUID().toString();
|
String uuid = UUID.randomUUID().toString();
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.dao;
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
|
|
||||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||||
|
@ -158,9 +157,9 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
|
||||||
@Override
|
@Override
|
||||||
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
|
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
|
||||||
StopWatch w = new StopWatch();
|
StopWatch w = new StopWatch();
|
||||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null);
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
|
myRequestPartitionHelperService.determineReadPartitionForRequestForHistory(
|
||||||
|
theRequestDetails, null, null);
|
||||||
IBundleProvider retVal = myTransactionService
|
IBundleProvider retVal = myTransactionService
|
||||||
.withRequest(theRequestDetails)
|
.withRequest(theRequestDetails)
|
||||||
.withRequestPartitionId(requestPartitionId)
|
.withRequestPartitionId(requestPartitionId)
|
||||||
|
|
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||||
class EncodedResource {
|
class EncodedResource {
|
||||||
|
|
||||||
private boolean myChanged;
|
private boolean myChanged;
|
||||||
|
private byte[] myResource;
|
||||||
private ResourceEncodingEnum myEncoding;
|
private ResourceEncodingEnum myEncoding;
|
||||||
private String myResourceText;
|
private String myResourceText;
|
||||||
|
|
||||||
|
@ -35,6 +36,14 @@ class EncodedResource {
|
||||||
myEncoding = theEncoding;
|
myEncoding = theEncoding;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public byte[] getResourceBinary() {
|
||||||
|
return myResource;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setResourceBinary(byte[] theResource) {
|
||||||
|
myResource = theResource;
|
||||||
|
}
|
||||||
|
|
||||||
public boolean isChanged() {
|
public boolean isChanged() {
|
||||||
return myChanged;
|
return myChanged;
|
||||||
}
|
}
|
||||||
|
|
|
@ -91,6 +91,9 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
|
||||||
// TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls.
|
// TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls.
|
||||||
// We should modify CachingValidationSupport to cache the results of "isXXXSupported"
|
// We should modify CachingValidationSupport to cache the results of "isXXXSupported"
|
||||||
// at which point we could do away with this cache
|
// at which point we could do away with this cache
|
||||||
|
// TODO: LD: This cache seems to supersede the cache in CachingValidationSupport, as that cache is set to
|
||||||
|
// 10 minutes, but this 1 minute cache now determines the expiry.
|
||||||
|
// This new behaviour was introduced between the 7.0.0 release and the current master (7.2.0)
|
||||||
private Cache<String, IBaseResource> myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000);
|
private Cache<String, IBaseResource> myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -188,6 +191,9 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
|
||||||
IBaseResource fetched = myLoadCache.get(key, t -> doFetchResource(theClass, theUri));
|
IBaseResource fetched = myLoadCache.get(key, t -> doFetchResource(theClass, theUri));
|
||||||
|
|
||||||
if (fetched == myNoMatch) {
|
if (fetched == myNoMatch) {
|
||||||
|
ourLog.debug(
|
||||||
|
"Invalidating cache entry for URI: {} since the result of the underlying query is empty", theUri);
|
||||||
|
myLoadCache.invalidate(key);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -65,7 +65,7 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
|
||||||
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
||||||
theRequestDetails, getResourceName(), theSearchParameterMap, null);
|
theRequestDetails, getResourceName(), theSearchParameterMap);
|
||||||
return mySearchCoordinatorSvc.registerSearch(
|
return mySearchCoordinatorSvc.registerSearch(
|
||||||
this,
|
this,
|
||||||
theSearchParameterMap,
|
theSearchParameterMap,
|
||||||
|
@ -128,7 +128,7 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
|
||||||
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
|
||||||
theRequestDetails, getResourceName(), theSearchParameterMap, null);
|
theRequestDetails, getResourceName(), theSearchParameterMap);
|
||||||
|
|
||||||
List<List<IQueryParameterType>> patientParams = new ArrayList<>();
|
List<List<IQueryParameterType>> patientParams = new ArrayList<>();
|
||||||
if (theSearchParameterMap.get(getPatientParamName()) != null) {
|
if (theSearchParameterMap.get(getPatientParamName()) != null) {
|
||||||
|
|
|
@ -106,7 +106,7 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
|
||||||
}
|
}
|
||||||
|
|
||||||
RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(
|
RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(
|
||||||
theRequest, getResourceName(), paramMap, null);
|
theRequest, getResourceName(), paramMap);
|
||||||
|
|
||||||
adjustCount(theRequest, paramMap);
|
adjustCount(theRequest, paramMap);
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,153 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.dao;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.parser.IParser;
|
||||||
|
import com.google.common.hash.HashCode;
|
||||||
|
import com.google.common.hash.HashFunction;
|
||||||
|
import com.google.common.hash.Hashing;
|
||||||
|
import jakarta.annotation.Nonnull;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Responsible for various resource history-centric and {@link FhirContext} aware operations called by
|
||||||
|
* {@link BaseHapiFhirDao} or {@link BaseHapiFhirResourceDao} that require knowledge of whether an Oracle database is
|
||||||
|
* being used.
|
||||||
|
*/
|
||||||
|
public class ResourceHistoryCalculator {
|
||||||
|
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculator.class);
|
||||||
|
private static final HashFunction SHA_256 = Hashing.sha256();
|
||||||
|
|
||||||
|
private final FhirContext myFhirContext;
|
||||||
|
private final boolean myIsOracleDialect;
|
||||||
|
|
||||||
|
public ResourceHistoryCalculator(FhirContext theFhirContext, boolean theIsOracleDialect) {
|
||||||
|
myFhirContext = theFhirContext;
|
||||||
|
myIsOracleDialect = theIsOracleDialect;
|
||||||
|
}
|
||||||
|
|
||||||
|
ResourceHistoryState calculateResourceHistoryState(
|
||||||
|
IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
|
||||||
|
final String encodedResource = encodeResource(theResource, theEncoding, theExcludeElements);
|
||||||
|
final byte[] resourceBinary;
|
||||||
|
final String resourceText;
|
||||||
|
final ResourceEncodingEnum encoding;
|
||||||
|
final HashCode hashCode;
|
||||||
|
|
||||||
|
if (myIsOracleDialect) {
|
||||||
|
resourceText = null;
|
||||||
|
resourceBinary = getResourceBinary(theEncoding, encodedResource);
|
||||||
|
encoding = theEncoding;
|
||||||
|
hashCode = SHA_256.hashBytes(resourceBinary);
|
||||||
|
} else {
|
||||||
|
resourceText = encodedResource;
|
||||||
|
resourceBinary = null;
|
||||||
|
encoding = ResourceEncodingEnum.JSON;
|
||||||
|
hashCode = SHA_256.hashUnencodedChars(encodedResource);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new ResourceHistoryState(resourceText, resourceBinary, encoding, hashCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
boolean conditionallyAlterHistoryEntity(
|
||||||
|
ResourceTable theEntity, ResourceHistoryTable theHistoryEntity, String theResourceText) {
|
||||||
|
if (!myIsOracleDialect) {
|
||||||
|
ourLog.debug(
|
||||||
|
"Storing text of resource {} version {} as inline VARCHAR",
|
||||||
|
theEntity.getResourceId(),
|
||||||
|
theHistoryEntity.getVersion());
|
||||||
|
theHistoryEntity.setResourceTextVc(theResourceText);
|
||||||
|
theHistoryEntity.setResource(null);
|
||||||
|
theHistoryEntity.setEncoding(ResourceEncodingEnum.JSON);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
boolean isResourceHistoryChanged(
|
||||||
|
ResourceHistoryTable theCurrentHistoryVersion,
|
||||||
|
@Nullable byte[] theResourceBinary,
|
||||||
|
@Nullable String resourceText) {
|
||||||
|
if (myIsOracleDialect) {
|
||||||
|
return !Arrays.equals(theCurrentHistoryVersion.getResource(), theResourceBinary);
|
||||||
|
}
|
||||||
|
|
||||||
|
return !StringUtils.equals(theCurrentHistoryVersion.getResourceTextVc(), resourceText);
|
||||||
|
}
|
||||||
|
|
||||||
|
String encodeResource(
|
||||||
|
IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
|
||||||
|
final IParser parser = theEncoding.newParser(myFhirContext);
|
||||||
|
parser.setDontEncodeElements(theExcludeElements);
|
||||||
|
return parser.encodeResourceToString(theResource);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* helper for returning the encoded byte array of the input resource string based on the theEncoding.
|
||||||
|
*
|
||||||
|
* @param theEncoding the theEncoding to used
|
||||||
|
* @param theEncodedResource the resource to encode
|
||||||
|
* @return byte array of the resource
|
||||||
|
*/
|
||||||
|
@Nonnull
|
||||||
|
static byte[] getResourceBinary(ResourceEncodingEnum theEncoding, String theEncodedResource) {
|
||||||
|
switch (theEncoding) {
|
||||||
|
case JSON:
|
||||||
|
return theEncodedResource.getBytes(StandardCharsets.UTF_8);
|
||||||
|
case JSONC:
|
||||||
|
return GZipUtil.compress(theEncodedResource);
|
||||||
|
default:
|
||||||
|
return new byte[0];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void populateEncodedResource(
|
||||||
|
EncodedResource theEncodedResource,
|
||||||
|
String theEncodedResourceString,
|
||||||
|
@Nullable byte[] theResourceBinary,
|
||||||
|
ResourceEncodingEnum theEncoding) {
|
||||||
|
if (myIsOracleDialect) {
|
||||||
|
populateEncodedResourceInner(theEncodedResource, null, theResourceBinary, theEncoding);
|
||||||
|
} else {
|
||||||
|
populateEncodedResourceInner(theEncodedResource, theEncodedResourceString, null, ResourceEncodingEnum.JSON);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void populateEncodedResourceInner(
|
||||||
|
EncodedResource encodedResource,
|
||||||
|
String encodedResourceString,
|
||||||
|
byte[] theResourceBinary,
|
||||||
|
ResourceEncodingEnum theEncoding) {
|
||||||
|
encodedResource.setResourceText(encodedResourceString);
|
||||||
|
encodedResource.setResourceBinary(theResourceBinary);
|
||||||
|
encodedResource.setEncoding(theEncoding);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,105 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.dao;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||||
|
import com.google.common.hash.HashCode;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.StringJoiner;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POJO to contain the results of {@link ResourceHistoryCalculator#calculateResourceHistoryState(IBaseResource, ResourceEncodingEnum, List)}
|
||||||
|
*/
|
||||||
|
public class ResourceHistoryState {
|
||||||
|
@Nullable
|
||||||
|
private final String myResourceText;
|
||||||
|
|
||||||
|
@Nullable
|
||||||
|
private final byte[] myResourceBinary;
|
||||||
|
|
||||||
|
private final ResourceEncodingEnum myEncoding;
|
||||||
|
private final HashCode myHashCode;
|
||||||
|
|
||||||
|
public ResourceHistoryState(
|
||||||
|
@Nullable String theResourceText,
|
||||||
|
@Nullable byte[] theResourceBinary,
|
||||||
|
ResourceEncodingEnum theEncoding,
|
||||||
|
HashCode theHashCode) {
|
||||||
|
myResourceText = theResourceText;
|
||||||
|
myResourceBinary = theResourceBinary;
|
||||||
|
myEncoding = theEncoding;
|
||||||
|
myHashCode = theHashCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Nullable
|
||||||
|
public String getResourceText() {
|
||||||
|
return myResourceText;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Nullable
|
||||||
|
public byte[] getResourceBinary() {
|
||||||
|
return myResourceBinary;
|
||||||
|
}
|
||||||
|
|
||||||
|
public ResourceEncodingEnum getEncoding() {
|
||||||
|
return myEncoding;
|
||||||
|
}
|
||||||
|
|
||||||
|
public HashCode getHashCode() {
|
||||||
|
return myHashCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object theO) {
|
||||||
|
if (this == theO) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (theO == null || getClass() != theO.getClass()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
ResourceHistoryState that = (ResourceHistoryState) theO;
|
||||||
|
return Objects.equals(myResourceText, that.myResourceText)
|
||||||
|
&& Arrays.equals(myResourceBinary, that.myResourceBinary)
|
||||||
|
&& myEncoding == that.myEncoding
|
||||||
|
&& Objects.equals(myHashCode, that.myHashCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
int result = Objects.hash(myResourceText, myEncoding, myHashCode);
|
||||||
|
result = 31 * result + Arrays.hashCode(myResourceBinary);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return new StringJoiner(", ", ResourceHistoryState.class.getSimpleName() + "[", "]")
|
||||||
|
.add("myResourceText='" + myResourceText + "'")
|
||||||
|
.add("myResourceBinary=" + Arrays.toString(myResourceBinary))
|
||||||
|
.add("myEncoding=" + myEncoding)
|
||||||
|
.add("myHashCode=" + myHashCode)
|
||||||
|
.toString();
|
||||||
|
}
|
||||||
|
}
|
|
@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
||||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||||
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
|
|
||||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||||
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
||||||
|
@ -135,10 +134,9 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
|
||||||
|
|
||||||
ourLog.info("BEGINNING GLOBAL $expunge");
|
ourLog.info("BEGINNING GLOBAL $expunge");
|
||||||
Propagation propagation = Propagation.REQUIRES_NEW;
|
Propagation propagation = Propagation.REQUIRES_NEW;
|
||||||
ReadPartitionIdRequestDetails details =
|
|
||||||
ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE);
|
|
||||||
RequestPartitionId requestPartitionId =
|
RequestPartitionId requestPartitionId =
|
||||||
myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details);
|
myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(
|
||||||
|
theRequest, ProviderConstants.OPERATION_EXPUNGE);
|
||||||
|
|
||||||
deleteAll(theRequest, propagation, requestPartitionId, counter);
|
deleteAll(theRequest, propagation, requestPartitionId, counter);
|
||||||
|
|
||||||
|
|
|
@ -477,10 +477,19 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
|
public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
|
||||||
return myMemoryCacheService.get(
|
// do getIfPresent and then put to avoid doing I/O inside the cache.
|
||||||
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
|
Optional<String> forcedId =
|
||||||
theId.getId(),
|
myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId());
|
||||||
pid -> myResourceTableDao.findById(pid).map(ResourceTable::asTypedFhirResourceId));
|
|
||||||
|
if (forcedId == null) {
|
||||||
|
// This is only called when we know the resource exists.
|
||||||
|
// So this optional is only empty when there is no hfj_forced_id table
|
||||||
|
// note: this is obsolete with the new fhir_id column, and will go away.
|
||||||
|
forcedId = myResourceTableDao.findById(theId.getId()).map(ResourceTable::asTypedFhirResourceId);
|
||||||
|
myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId(), forcedId);
|
||||||
|
}
|
||||||
|
|
||||||
|
return forcedId;
|
||||||
}
|
}
|
||||||
|
|
||||||
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
|
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
|
||||||
|
|
|
@ -133,10 +133,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||||
mdmLinkTable
|
mdmLinkTable
|
||||||
.addIndex("20230911.1", "IDX_EMPI_TGT_MR_LS")
|
.addIndex("20230911.1", "IDX_EMPI_TGT_MR_LS")
|
||||||
.unique(false)
|
.unique(false)
|
||||||
|
.online(true)
|
||||||
.withColumns("TARGET_TYPE", "MATCH_RESULT", "LINK_SOURCE");
|
.withColumns("TARGET_TYPE", "MATCH_RESULT", "LINK_SOURCE");
|
||||||
mdmLinkTable
|
mdmLinkTable
|
||||||
.addIndex("20230911.2", "IDX_EMPi_TGT_MR_SCore")
|
.addIndex("20230911.2", "IDX_EMPi_TGT_MR_SCore")
|
||||||
.unique(false)
|
.unique(false)
|
||||||
|
.online(true)
|
||||||
.withColumns("TARGET_TYPE", "MATCH_RESULT", "SCORE");
|
.withColumns("TARGET_TYPE", "MATCH_RESULT", "SCORE");
|
||||||
|
|
||||||
// Move forced_id constraints to hfj_resource and the new fhir_id column
|
// Move forced_id constraints to hfj_resource and the new fhir_id column
|
||||||
|
@ -166,7 +168,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||||
.withColumns("RES_TYPE", "FHIR_ID");
|
.withColumns("RES_TYPE", "FHIR_ID");
|
||||||
|
|
||||||
// For resolving references that don't supply the type.
|
// For resolving references that don't supply the type.
|
||||||
hfjResource.addIndex("20231027.3", "IDX_RES_FHIR_ID").unique(false).withColumns("FHIR_ID");
|
hfjResource
|
||||||
|
.addIndex("20231027.3", "IDX_RES_FHIR_ID")
|
||||||
|
.unique(false)
|
||||||
|
.online(true)
|
||||||
|
.withColumns("FHIR_ID");
|
||||||
|
|
||||||
Builder.BuilderWithTableName batch2JobInstanceTable = version.onTable("BT2_JOB_INSTANCE");
|
Builder.BuilderWithTableName batch2JobInstanceTable = version.onTable("BT2_JOB_INSTANCE");
|
||||||
|
|
||||||
|
@ -177,25 +183,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||||
{
|
{
|
||||||
version.executeRawSql(
|
version.executeRawSql(
|
||||||
"20231212.1",
|
"20231212.1",
|
||||||
"CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
|
"CREATE INDEX CONCURRENTLY idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
|
||||||
|
.setTransactional(false)
|
||||||
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
||||||
.onlyIf(
|
.onlyIf(
|
||||||
String.format(
|
String.format(
|
||||||
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
||||||
"HFJ_SPIDX_STRING".toLowerCase(),
|
"HFJ_SPIDX_STRING".toLowerCase(),
|
||||||
"SP_VALUE_NORMALIZED".toLowerCase()),
|
"SP_VALUE_NORMALIZED".toLowerCase()),
|
||||||
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
|
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
|
||||||
|
.onlyIf(
|
||||||
|
"SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_string_hash_nrm_pattern_ops')",
|
||||||
|
"Index idx_sp_string_hash_nrm_pattern_ops already exists");
|
||||||
version.executeRawSql(
|
version.executeRawSql(
|
||||||
"20231212.2",
|
"20231212.2",
|
||||||
"CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
|
"CREATE UNIQUE INDEX CONCURRENTLY idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
|
||||||
|
.setTransactional(false)
|
||||||
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
||||||
.onlyIf(
|
.onlyIf(
|
||||||
String.format(
|
String.format(
|
||||||
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
||||||
"HFJ_SPIDX_URI".toLowerCase(),
|
"HFJ_SPIDX_URI".toLowerCase(),
|
||||||
"SP_URI".toLowerCase()),
|
"SP_URI".toLowerCase()),
|
||||||
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
|
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
|
||||||
|
.onlyIf(
|
||||||
|
"SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_uri_hash_identity_pattern_ops')",
|
||||||
|
"Index idx_sp_uri_hash_identity_pattern_ops already exists.");
|
||||||
}
|
}
|
||||||
|
|
||||||
// This fix was bad for MSSQL, it has been set to do nothing.
|
// This fix was bad for MSSQL, it has been set to do nothing.
|
||||||
|
@ -622,6 +635,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||||
version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements));
|
version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements));
|
||||||
|
|
||||||
// Use an unlimited length text column for RES_TEXT_VC
|
// Use an unlimited length text column for RES_TEXT_VC
|
||||||
|
// N.B. This will FAIL SILENTLY on Oracle due to the fact that Oracle does not support an ALTER TABLE from
|
||||||
|
// VARCHAR to
|
||||||
|
// CLOB. Because of failureAllowed() this won't halt the migration
|
||||||
version.onTable("HFJ_RES_VER")
|
version.onTable("HFJ_RES_VER")
|
||||||
.modifyColumn("20230421.1", "RES_TEXT_VC")
|
.modifyColumn("20230421.1", "RES_TEXT_VC")
|
||||||
.nullable()
|
.nullable()
|
||||||
|
|
|
@ -296,7 +296,8 @@ public class QueryStack {
|
||||||
String theReferenceTargetType,
|
String theReferenceTargetType,
|
||||||
String theParamName,
|
String theParamName,
|
||||||
String theChain,
|
String theChain,
|
||||||
boolean theAscending) {
|
boolean theAscending,
|
||||||
|
SearchParameterMap theParams) {
|
||||||
BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
|
BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
|
||||||
ResourceLinkPredicateBuilder resourceLinkPredicateBuilder = mySqlBuilder.createReferencePredicateBuilder(this);
|
ResourceLinkPredicateBuilder resourceLinkPredicateBuilder = mySqlBuilder.createReferencePredicateBuilder(this);
|
||||||
|
|
||||||
|
@ -378,13 +379,37 @@ public class QueryStack {
|
||||||
* sort on a target that was a reference or a quantity, but if someone needed
|
* sort on a target that was a reference or a quantity, but if someone needed
|
||||||
* that we could implement it here.
|
* that we could implement it here.
|
||||||
*/
|
*/
|
||||||
|
case SPECIAL: {
|
||||||
|
if (LOCATION_POSITION.equals(targetSearchParameter.getPath())) {
|
||||||
|
List<List<IQueryParameterType>> params = theParams.get(theParamName);
|
||||||
|
if (params != null && !params.isEmpty() && !params.get(0).isEmpty()) {
|
||||||
|
IQueryParameterType locationParam = params.get(0).get(0);
|
||||||
|
final SpecialParam specialParam =
|
||||||
|
new SpecialParam().setValue(locationParam.getValueAsQueryToken(myFhirContext));
|
||||||
|
ParsedLocationParam location = ParsedLocationParam.from(theParams, specialParam);
|
||||||
|
double latitudeValue = location.getLatitudeValue();
|
||||||
|
double longitudeValue = location.getLongitudeValue();
|
||||||
|
final CoordsPredicateBuilder coordsPredicateBuilder = mySqlBuilder.addCoordsPredicateBuilder(
|
||||||
|
resourceLinkPredicateBuilder.getColumnTargetResourceId());
|
||||||
|
mySqlBuilder.addSortCoordsNear(
|
||||||
|
coordsPredicateBuilder, latitudeValue, longitudeValue, theAscending);
|
||||||
|
} else {
|
||||||
|
String msg = myFhirContext
|
||||||
|
.getLocalizer()
|
||||||
|
.getMessageSanitized(
|
||||||
|
QueryStack.class, "cantSortOnCoordParamWithoutValues", theParamName);
|
||||||
|
throw new InvalidRequestException(Msg.code(2497) + msg);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
case NUMBER:
|
case NUMBER:
|
||||||
case REFERENCE:
|
case REFERENCE:
|
||||||
case COMPOSITE:
|
case COMPOSITE:
|
||||||
case QUANTITY:
|
case QUANTITY:
|
||||||
case URI:
|
case URI:
|
||||||
case HAS:
|
case HAS:
|
||||||
case SPECIAL:
|
|
||||||
default:
|
default:
|
||||||
throw new InvalidRequestException(Msg.code(2290) + "Unable to sort on a chained parameter "
|
throw new InvalidRequestException(Msg.code(2290) + "Unable to sort on a chained parameter "
|
||||||
+ theParamName + "." + theChain + " as this parameter. Can not sort on chains of target type: "
|
+ theParamName + "." + theChain + " as this parameter. Can not sort on chains of target type: "
|
||||||
|
@ -2466,7 +2491,7 @@ public class QueryStack {
|
||||||
theRequestPartitionId,
|
theRequestPartitionId,
|
||||||
andPredicates,
|
andPredicates,
|
||||||
nextAnd)) {
|
nextAnd)) {
|
||||||
break;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum =
|
EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum =
|
||||||
|
|
|
@ -932,7 +932,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
|
||||||
break;
|
break;
|
||||||
case REFERENCE:
|
case REFERENCE:
|
||||||
theQueryStack.addSortOnResourceLink(
|
theQueryStack.addSortOnResourceLink(
|
||||||
myResourceName, referenceTargetType, paramName, chainName, ascending);
|
myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
|
||||||
break;
|
break;
|
||||||
case TOKEN:
|
case TOKEN:
|
||||||
theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
|
theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
|
||||||
|
|
|
@ -49,7 +49,6 @@ import ca.uhn.fhir.jpa.util.QueryParameterUtils;
|
||||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||||
import ca.uhn.fhir.model.primitive.IdDt;
|
import ca.uhn.fhir.model.primitive.IdDt;
|
||||||
import ca.uhn.fhir.parser.DataFormatException;
|
import ca.uhn.fhir.parser.DataFormatException;
|
||||||
import ca.uhn.fhir.rest.api.Constants;
|
|
||||||
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
|
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||||
|
@ -60,6 +59,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||||
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import com.google.common.collect.Lists;
|
import com.google.common.collect.Lists;
|
||||||
import com.healthmarketscience.sqlbuilder.BinaryCondition;
|
import com.healthmarketscience.sqlbuilder.BinaryCondition;
|
||||||
import com.healthmarketscience.sqlbuilder.ComboCondition;
|
import com.healthmarketscience.sqlbuilder.ComboCondition;
|
||||||
|
@ -70,6 +70,7 @@ import com.healthmarketscience.sqlbuilder.UnaryCondition;
|
||||||
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
|
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
import jakarta.annotation.Nullable;
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
import org.hl7.fhir.instance.model.api.IIdType;
|
import org.hl7.fhir.instance.model.api.IIdType;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
|
@ -83,16 +84,20 @@ import java.util.Collections;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.ListIterator;
|
import java.util.ListIterator;
|
||||||
|
import java.util.Optional;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
|
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
|
||||||
|
import static ca.uhn.fhir.rest.api.Constants.*;
|
||||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||||
import static org.apache.commons.lang3.StringUtils.trim;
|
import static org.apache.commons.lang3.StringUtils.trim;
|
||||||
|
|
||||||
public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder implements ICanMakeMissingParamPredicate {
|
public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder implements ICanMakeMissingParamPredicate {
|
||||||
|
|
||||||
private static final Logger ourLog = LoggerFactory.getLogger(ResourceLinkPredicateBuilder.class);
|
private static final Logger ourLog = LoggerFactory.getLogger(ResourceLinkPredicateBuilder.class);
|
||||||
|
private static final Pattern MODIFIER_REPLACE_PATTERN = Pattern.compile(".*:");
|
||||||
private final DbColumn myColumnSrcType;
|
private final DbColumn myColumnSrcType;
|
||||||
private final DbColumn myColumnSrcPath;
|
private final DbColumn myColumnSrcPath;
|
||||||
private final DbColumn myColumnTargetResourceId;
|
private final DbColumn myColumnTargetResourceId;
|
||||||
|
@ -204,6 +209,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
||||||
targetQualifiedUrls.add(dt.getValue());
|
targetQualifiedUrls.add(dt.getValue());
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
validateModifierUse(theRequest, theResourceType, ref);
|
||||||
|
validateResourceTypeInReferenceParam(ref.getResourceType());
|
||||||
targetIds.add(dt);
|
targetIds.add(dt);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -256,6 +263,53 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void validateModifierUse(RequestDetails theRequest, String theResourceType, ReferenceParam theRef) {
|
||||||
|
try {
|
||||||
|
final String resourceTypeFromRef = theRef.getResourceType();
|
||||||
|
if (StringUtils.isEmpty(resourceTypeFromRef)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// TODO: LD: unless we do this, ResourceProviderR4Test#testSearchWithSlashes will fail due to its
|
||||||
|
// derived-from: syntax
|
||||||
|
getFhirContext().getResourceDefinition(resourceTypeFromRef);
|
||||||
|
} catch (DataFormatException e) {
|
||||||
|
final List<String> nonMatching = Optional.ofNullable(theRequest)
|
||||||
|
.map(RequestDetails::getParameters)
|
||||||
|
.map(params -> params.keySet().stream()
|
||||||
|
.filter(mod -> mod.contains(":"))
|
||||||
|
.map(MODIFIER_REPLACE_PATTERN::matcher)
|
||||||
|
.map(pattern -> pattern.replaceAll(":"))
|
||||||
|
.filter(mod -> !VALID_MODIFIERS.contains(mod))
|
||||||
|
.distinct()
|
||||||
|
.collect(Collectors.toUnmodifiableList()))
|
||||||
|
.orElse(Collections.emptyList());
|
||||||
|
|
||||||
|
if (!nonMatching.isEmpty()) {
|
||||||
|
final String msg = getFhirContext()
|
||||||
|
.getLocalizer()
|
||||||
|
.getMessageSanitized(
|
||||||
|
SearchCoordinatorSvcImpl.class,
|
||||||
|
"invalidUseOfSearchIdentifier",
|
||||||
|
nonMatching,
|
||||||
|
theResourceType,
|
||||||
|
VALID_MODIFIERS);
|
||||||
|
throw new InvalidRequestException(Msg.code(2498) + msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateResourceTypeInReferenceParam(final String theResourceType) {
|
||||||
|
if (StringUtils.isEmpty(theResourceType)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
getFhirContext().getResourceDefinition(theResourceType);
|
||||||
|
} catch (DataFormatException e) {
|
||||||
|
throw newInvalidResourceTypeException(theResourceType);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private Condition createPredicateReference(
|
private Condition createPredicateReference(
|
||||||
boolean theInverse,
|
boolean theInverse,
|
||||||
List<String> thePathsToMatch,
|
List<String> thePathsToMatch,
|
||||||
|
@ -355,18 +409,14 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
||||||
/*
|
/*
|
||||||
* Handle chain on _type
|
* Handle chain on _type
|
||||||
*/
|
*/
|
||||||
if (Constants.PARAM_TYPE.equals(theReferenceParam.getChain())) {
|
if (PARAM_TYPE.equals(theReferenceParam.getChain())) {
|
||||||
|
|
||||||
List<String> pathsToMatch = createResourceLinkPaths(theResourceName, theParamName, theQualifiers);
|
List<String> pathsToMatch = createResourceLinkPaths(theResourceName, theParamName, theQualifiers);
|
||||||
Condition typeCondition = createPredicateSourcePaths(pathsToMatch);
|
Condition typeCondition = createPredicateSourcePaths(pathsToMatch);
|
||||||
|
|
||||||
String typeValue = theReferenceParam.getValue();
|
String typeValue = theReferenceParam.getValue();
|
||||||
|
|
||||||
try {
|
validateResourceTypeInReferenceParam(typeValue);
|
||||||
getFhirContext().getResourceDefinition(typeValue).getImplementingClass();
|
|
||||||
} catch (DataFormatException e) {
|
|
||||||
throw newInvalidResourceTypeException(typeValue);
|
|
||||||
}
|
|
||||||
if (!resourceTypes.contains(typeValue)) {
|
if (!resourceTypes.contains(typeValue)) {
|
||||||
throw newInvalidTargetTypeForChainException(theResourceName, theParamName, typeValue);
|
throw newInvalidTargetTypeForChainException(theResourceName, theParamName, typeValue);
|
||||||
}
|
}
|
||||||
|
@ -705,7 +755,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
||||||
.getLocalizer()
|
.getLocalizer()
|
||||||
.getMessage(
|
.getMessage(
|
||||||
ResourceLinkPredicateBuilder.class, "invalidTargetTypeForChain", theTypeValue, searchParamName);
|
ResourceLinkPredicateBuilder.class, "invalidTargetTypeForChain", theTypeValue, searchParamName);
|
||||||
return new InvalidRequestException(msg);
|
return new InvalidRequestException(Msg.code(2495) + msg);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
|
@ -765,4 +815,14 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
||||||
|
|
||||||
return combineWithRequestPartitionIdPredicate(theParams.getRequestPartitionId(), unaryCondition);
|
return combineWithRequestPartitionIdPredicate(theParams.getRequestPartitionId(), unaryCondition);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) {
|
||||||
|
mySearchParamRegistry = theSearchParamRegistry;
|
||||||
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) {
|
||||||
|
myIdHelperService = theIdHelperService;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.search.reindex;
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||||
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
|
|
||||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||||
|
@ -33,6 +32,9 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
|
||||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
|
||||||
import ca.uhn.fhir.jpa.model.entity.*;
|
import ca.uhn.fhir.jpa.model.entity.*;
|
||||||
import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
|
import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
|
||||||
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
|
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
|
||||||
|
@ -236,8 +238,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
private RequestPartitionId determinePartition(RequestDetails theRequestDetails, IIdType theResourceId) {
|
private RequestPartitionId determinePartition(RequestDetails theRequestDetails, IIdType theResourceId) {
|
||||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(theResourceId);
|
return myPartitionHelperSvc.determineReadPartitionForRequestForRead(theRequestDetails, theResourceId);
|
||||||
return myPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
|
|
|
@ -649,7 +649,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
|
||||||
.getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
|
.getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
|
||||||
theAccumulator.addMessage(msg);
|
theAccumulator.addMessage(msg);
|
||||||
expandConcepts(
|
expandConcepts(
|
||||||
theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, theAddedCodes, isOracleDialect());
|
theExpansionOptions,
|
||||||
|
theAccumulator,
|
||||||
|
termValueSet,
|
||||||
|
theFilter,
|
||||||
|
theAdd,
|
||||||
|
theAddedCodes,
|
||||||
|
myHibernatePropertiesProvider.isOracleDialect());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
|
@ -664,10 +670,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
|
||||||
return expansionTimestamp;
|
return expansionTimestamp;
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean isOracleDialect() {
|
|
||||||
return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.OracleDialect;
|
|
||||||
}
|
|
||||||
|
|
||||||
private void expandConcepts(
|
private void expandConcepts(
|
||||||
ValueSetExpansionOptions theExpansionOptions,
|
ValueSetExpansionOptions theExpansionOptions,
|
||||||
IValueSetConceptAccumulator theAccumulator,
|
IValueSetConceptAccumulator theAccumulator,
|
||||||
|
@ -1596,6 +1598,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
|
||||||
TermConcept code = findCodeForFilterCriteria(theSystem, theFilter);
|
TermConcept code = findCodeForFilterCriteria(theSystem, theFilter);
|
||||||
|
|
||||||
if (theFilter.getOp() == ValueSet.FilterOperator.ISA) {
|
if (theFilter.getOp() == ValueSet.FilterOperator.ISA) {
|
||||||
|
ourLog.debug(
|
||||||
|
" * Filtering on specific code and codes with a parent of {}/{}/{}",
|
||||||
|
code.getId(),
|
||||||
|
code.getCode(),
|
||||||
|
code.getDisplay());
|
||||||
|
|
||||||
|
b.must(f.bool()
|
||||||
|
.should(f.match().field("myParentPids").matching("" + code.getId()))
|
||||||
|
.should(f.match().field("myId").matching(code.getId())));
|
||||||
|
} else if (theFilter.getOp() == ValueSet.FilterOperator.DESCENDENTOF) {
|
||||||
ourLog.debug(
|
ourLog.debug(
|
||||||
" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
|
" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,326 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import com.google.common.hash.HashCode;
|
||||||
|
import com.google.common.hash.HashFunction;
|
||||||
|
import com.google.common.hash.Hashing;
|
||||||
|
import org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.r4.hapi.ctx.FhirR4;
|
||||||
|
import org.hl7.fhir.r4.model.Patient;
|
||||||
|
import org.junit.jupiter.params.ParameterizedTest;
|
||||||
|
import org.junit.jupiter.params.provider.Arguments;
|
||||||
|
import org.junit.jupiter.params.provider.MethodSource;
|
||||||
|
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.Month;
|
||||||
|
import java.time.ZoneId;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||||
|
|
||||||
|
class ResourceHistoryCalculatorTest {
|
||||||
|
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculatorTest.class);
|
||||||
|
|
||||||
|
private static final FhirContext CONTEXT = FhirContext.forR4Cached();
|
||||||
|
|
||||||
|
private static final ResourceHistoryCalculator CALCULATOR_ORACLE = new ResourceHistoryCalculator(CONTEXT, true);
|
||||||
|
private static final ResourceHistoryCalculator CALCULATOR_NON_ORACLE = new ResourceHistoryCalculator(CONTEXT, false);
|
||||||
|
|
||||||
|
private static final LocalDate TODAY = LocalDate.of(2024, Month.JANUARY, 25);
|
||||||
|
private static final String ENCODED_RESOURCE_1 = "1234";
|
||||||
|
private static final String ENCODED_RESOURCE_2 = "abcd";
|
||||||
|
private static final String RESOURCE_TEXT_VC = "resourceTextVc";
|
||||||
|
private static final List<String> EXCLUDED_ELEMENTS_1 = List.of("id");
|
||||||
|
private static final List<String> EXCLUDED_ELEMENTS_2 = List.of("resourceType", "birthDate");
|
||||||
|
private static final HashFunction SHA_256 = Hashing.sha256();
|
||||||
|
|
||||||
|
private static Stream<Arguments> calculateResourceHistoryStateArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The purpose of this test is to ensure that the conditional logic to pre-calculate resource history text or binaries
|
||||||
|
* is respected.
|
||||||
|
* If this is for Oracle, the resource text will be driven off a binary with a given encoding with the
|
||||||
|
* resource text effectively ignored.
|
||||||
|
* If this is not Oracle, it will be driven off a JSON encoded text field with
|
||||||
|
* the binary effectively ignored.
|
||||||
|
*/
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("calculateResourceHistoryStateArguments")
|
||||||
|
void calculateResourceHistoryState(FhirContext theFhirContext, boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
|
||||||
|
final IBaseResource patient = getPatient(theFhirContext);
|
||||||
|
|
||||||
|
final ResourceHistoryCalculator calculator = getCalculator(theFhirContext, theIsOracle);
|
||||||
|
final ResourceHistoryState result = calculator.calculateResourceHistoryState(patient, theResourceEncoding, theExcludedElements);
|
||||||
|
|
||||||
|
if (theIsOracle) {
|
||||||
|
assertNotNull(result.getResourceBinary()); // On Oracle: We use the resource binary to serve up the resource content
|
||||||
|
assertNull(result.getResourceText()); // On Oracle: We do NOT use the resource text to serve up the resource content
|
||||||
|
assertEquals(theResourceEncoding, result.getEncoding()); // On Oracle, the resource encoding is what we used to encode the binary
|
||||||
|
assertEquals(SHA_256.hashBytes(result.getResourceBinary()), result.getHashCode()); // On Oracle, the SHA 256 hash is of the binary
|
||||||
|
} else {
|
||||||
|
assertNull(result.getResourceBinary()); // Non-Oracle: We do NOT use the resource binary to serve up the resource content
|
||||||
|
assertNotNull(result.getResourceText()); // Non-Oracle: We use the resource text to serve up the resource content
|
||||||
|
assertEquals(ResourceEncodingEnum.JSON, result.getEncoding()); // Non-Oracle, since we didn't encode a binary this is always JSON.
|
||||||
|
final HashCode expectedHashCode = SHA_256.hashUnencodedChars(calculator.encodeResource(patient, theResourceEncoding, theExcludedElements)); // Non-Oracle, the SHA 256 hash is of the parsed resource object
|
||||||
|
assertEquals(expectedHashCode, result.getHashCode());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static Stream<Arguments> conditionallyAlterHistoryEntityArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("conditionallyAlterHistoryEntityArguments")
|
||||||
|
void conditionallyAlterHistoryEntity_usesVarcharForOracle(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theResourceText) {
|
||||||
|
final ResourceTable resourceTable = new ResourceTable();
|
||||||
|
resourceTable.setId(123L);
|
||||||
|
|
||||||
|
final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
|
||||||
|
resourceHistoryTable.setVersion(1);
|
||||||
|
resourceHistoryTable.setResource("resource".getBytes(StandardCharsets.UTF_8));
|
||||||
|
resourceHistoryTable.setEncoding(theResourceEncoding);
|
||||||
|
resourceHistoryTable.setResourceTextVc(RESOURCE_TEXT_VC);
|
||||||
|
|
||||||
|
final boolean isChanged =
|
||||||
|
getCalculator(theIsOracle).conditionallyAlterHistoryEntity(resourceTable, resourceHistoryTable, theResourceText);
|
||||||
|
|
||||||
|
if (theIsOracle) {
|
||||||
|
assertFalse(isChanged);
|
||||||
|
assertNotNull(resourceHistoryTable.getResource());
|
||||||
|
assertEquals(RESOURCE_TEXT_VC, resourceHistoryTable.getResourceTextVc());
|
||||||
|
assertEquals(resourceHistoryTable.getEncoding(), resourceHistoryTable.getEncoding());
|
||||||
|
} else {
|
||||||
|
assertTrue(isChanged);
|
||||||
|
assertNull(resourceHistoryTable.getResource());
|
||||||
|
assertEquals(theResourceText, resourceHistoryTable.getResourceTextVc());
|
||||||
|
assertEquals(resourceHistoryTable.getEncoding(), ResourceEncodingEnum.JSON);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> encodeResourceArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||||
|
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("encodeResourceArguments")
|
||||||
|
void encodeResource_ensureFhirVersionSpecificAndIntendedElementsExcluded(FhirContext theFhirContext, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
|
||||||
|
final IBaseResource patient = getPatient(theFhirContext);
|
||||||
|
final String encodedResource = getCalculator(theFhirContext, true).encodeResource(patient, theResourceEncoding, theExcludedElements);
|
||||||
|
|
||||||
|
final String expectedEncoding =
|
||||||
|
theResourceEncoding.newParser(theFhirContext).setDontEncodeElements(theExcludedElements).encodeResourceToString(patient);
|
||||||
|
|
||||||
|
assertEquals(expectedEncoding, encodedResource);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> getResourceBinaryArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("getResourceBinaryArguments")
|
||||||
|
void getResourceBinary(ResourceEncodingEnum theResourceEncoding, String theEncodedResource) {
|
||||||
|
final byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(theResourceEncoding, theEncodedResource);
|
||||||
|
|
||||||
|
switch (theResourceEncoding) {
|
||||||
|
case JSON:
|
||||||
|
assertArrayEquals(theEncodedResource.getBytes(StandardCharsets.UTF_8), resourceBinary);
|
||||||
|
break;
|
||||||
|
case JSONC:
|
||||||
|
assertArrayEquals(GZipUtil.compress(theEncodedResource), resourceBinary);
|
||||||
|
break;
|
||||||
|
case DEL :
|
||||||
|
case ESR :
|
||||||
|
default:
|
||||||
|
assertArrayEquals(new byte[0], resourceBinary);
|
||||||
|
}
|
||||||
|
|
||||||
|
ourLog.info("resourceBinary: {}", resourceBinary);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> isResourceHistoryChangedArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(true, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("isResourceHistoryChangedArguments")
|
||||||
|
void isResourceHistoryChanged(boolean theIsOracle, byte[] theNewBinary, String theNewResourceText) {
|
||||||
|
final String existngResourceText = ENCODED_RESOURCE_1;
|
||||||
|
final byte[] existingBytes = existngResourceText.getBytes(StandardCharsets.UTF_8);
|
||||||
|
|
||||||
|
final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
|
||||||
|
resourceHistoryTable.setResource(existingBytes);
|
||||||
|
resourceHistoryTable.setResourceTextVc(existngResourceText);
|
||||||
|
|
||||||
|
final boolean isChanged = getCalculator(theIsOracle).isResourceHistoryChanged(resourceHistoryTable, theNewBinary, theNewResourceText);
|
||||||
|
|
||||||
|
if (theIsOracle) {
|
||||||
|
final boolean expectedResult = !Arrays.equals(existingBytes, theNewBinary);
|
||||||
|
assertEquals(expectedResult, isChanged);
|
||||||
|
} else {
|
||||||
|
final boolean expectedResult = ! existngResourceText.equals(theNewResourceText);
|
||||||
|
assertEquals(expectedResult, isChanged);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> populateEncodedResourceArguments() {
|
||||||
|
return Stream.of(
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
|
||||||
|
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("populateEncodedResourceArguments")
|
||||||
|
void populateEncodedResource(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theEncodedResourceString) {
|
||||||
|
final EncodedResource encodedResource = new EncodedResource();
|
||||||
|
final byte[] resourceBinary = theEncodedResourceString.getBytes(StandardCharsets.UTF_8);
|
||||||
|
|
||||||
|
getCalculator(theIsOracle)
|
||||||
|
.populateEncodedResource(encodedResource, theEncodedResourceString, resourceBinary, theResourceEncoding);
|
||||||
|
|
||||||
|
if (theIsOracle) {
|
||||||
|
assertEquals(resourceBinary, encodedResource.getResourceBinary());
|
||||||
|
assertNull(encodedResource.getResourceText());
|
||||||
|
assertEquals(theResourceEncoding, encodedResource.getEncoding());
|
||||||
|
} else {
|
||||||
|
assertNull(encodedResource.getResourceBinary());
|
||||||
|
assertEquals(theEncodedResourceString, encodedResource.getResourceText());
|
||||||
|
assertEquals(ResourceEncodingEnum.JSON, encodedResource.getEncoding());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private ResourceHistoryCalculator getCalculator(boolean theIsOracle) {
|
||||||
|
return theIsOracle ? CALCULATOR_ORACLE : CALCULATOR_NON_ORACLE;
|
||||||
|
}
|
||||||
|
|
||||||
|
private ResourceHistoryCalculator getCalculator(FhirContext theFhirContext, boolean theIsOracle) {
|
||||||
|
return new ResourceHistoryCalculator(theFhirContext, theIsOracle);
|
||||||
|
}
|
||||||
|
|
||||||
|
private IBaseResource getPatient(FhirContext theFhirContext) {
|
||||||
|
if (theFhirContext.getVersion() instanceof FhirR4) {
|
||||||
|
return getPatientR4();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (theFhirContext.getVersion() instanceof FhirDstu3) {
|
||||||
|
return getPatientDstu3();
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private org.hl7.fhir.dstu3.model.Patient getPatientDstu3() {
|
||||||
|
final org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();
|
||||||
|
|
||||||
|
patient.setId("123");
|
||||||
|
patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));
|
||||||
|
|
||||||
|
return patient;
|
||||||
|
}
|
||||||
|
|
||||||
|
private Patient getPatientR4() {
|
||||||
|
final Patient patient = new Patient();
|
||||||
|
|
||||||
|
patient.setId("123");
|
||||||
|
patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));
|
||||||
|
|
||||||
|
return patient;
|
||||||
|
}
|
||||||
|
}
|
|
@ -6,7 +6,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
||||||
|
|
|
@ -786,7 +786,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
|
||||||
logAndValidateValueSet(result);
|
logAndValidateValueSet(result);
|
||||||
|
|
||||||
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
|
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
|
||||||
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
|
assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>7.1.0-SNAPSHOT</version>
|
<version>7.1.3-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
|
@ -19,15 +19,17 @@
|
||||||
*/
|
*/
|
||||||
package ca.uhn.fhir.jpa.ips.api;
|
package ca.uhn.fhir.jpa.ips.api;
|
||||||
|
|
||||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
import ca.uhn.fhir.jpa.ips.strategy.BaseIpsGenerationStrategy;
|
||||||
import ca.uhn.fhir.model.api.Include;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ca.uhn.fhir.rest.param.TokenParam;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||||
import jakarta.annotation.Nonnull;
|
import jakarta.annotation.Nonnull;
|
||||||
import jakarta.annotation.Nullable;
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
import org.hl7.fhir.instance.model.api.IIdType;
|
import org.hl7.fhir.instance.model.api.IIdType;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This interface is the primary configuration and strategy provider for the
|
* This interface is the primary configuration and strategy provider for the
|
||||||
|
@ -39,11 +41,34 @@ import java.util.Set;
|
||||||
public interface IIpsGenerationStrategy {
|
public interface IIpsGenerationStrategy {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Provides a registry which defines the various sections that will be
|
* This method returns the profile associated with the IPS document
|
||||||
* included when generating an IPS. It can be subclassed and customized
|
* generated by this strategy.
|
||||||
* as needed in order to add, change, or remove sections.
|
|
||||||
*/
|
*/
|
||||||
SectionRegistry getSectionRegistry();
|
String getBundleProfile();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method will be called once by the framework. It can be
|
||||||
|
* used to perform any initialization.
|
||||||
|
*/
|
||||||
|
void initialize();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method should return a list of the sections to include in the
|
||||||
|
* generated IPS. Note that each section must have a unique value for the
|
||||||
|
* {@link Section#getProfile()} value.
|
||||||
|
*/
|
||||||
|
@Nonnull
|
||||||
|
List<Section> getSections();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the resource supplier for the given section. The resource supplier
|
||||||
|
* is used to supply the resources which will be used for a given
|
||||||
|
* section.
|
||||||
|
*
|
||||||
|
* @param theSection The section
|
||||||
|
*/
|
||||||
|
@Nonnull
|
||||||
|
ISectionResourceSupplier getSectionResourceSupplier(@Nonnull Section theSection);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Provides a list of configuration property files for the IPS narrative generator.
|
* Provides a list of configuration property files for the IPS narrative generator.
|
||||||
|
@ -53,7 +78,7 @@ public interface IIpsGenerationStrategy {
|
||||||
* <p>
|
* <p>
|
||||||
* If more than one file is provided, the files will be evaluated in order. Therefore you
|
* If more than one file is provided, the files will be evaluated in order. Therefore you
|
||||||
* might choose to include a custom file, followed by
|
* might choose to include a custom file, followed by
|
||||||
* {@link ca.uhn.fhir.jpa.ips.strategy.DefaultIpsGenerationStrategy#DEFAULT_IPS_NARRATIVES_PROPERTIES}
|
* {@link BaseIpsGenerationStrategy#DEFAULT_IPS_NARRATIVES_PROPERTIES}
|
||||||
* in order to fall back to the default templates for any sections you have not
|
* in order to fall back to the default templates for any sections you have not
|
||||||
* provided an explicit template for.
|
* provided an explicit template for.
|
||||||
* </p>
|
* </p>
|
||||||
|
@ -85,7 +110,13 @@ public interface IIpsGenerationStrategy {
|
||||||
/**
|
/**
|
||||||
* This method is used to determine the resource ID to assign to a resource that
|
* This method is used to determine the resource ID to assign to a resource that
|
||||||
* will be added to the IPS document Bundle. Implementations will probably either
|
* will be added to the IPS document Bundle. Implementations will probably either
|
||||||
* return the resource ID as-is, or generate a placeholder UUID to replace it with.
|
* return <code>null</code> to leave the resource ID as-is, or generate a
|
||||||
|
* placeholder UUID to replace it with.
|
||||||
|
* <p>
|
||||||
|
* If you want to replace the native resource ID with a placeholder so as not
|
||||||
|
* to leak the server-generated IDs, the recommended way is to
|
||||||
|
* return <code>IdType.newRandomUuid()</code>
|
||||||
|
* </p>
|
||||||
*
|
*
|
||||||
* @param theIpsContext The associated context for the specific IPS document being
|
* @param theIpsContext The associated context for the specific IPS document being
|
||||||
* generated. Note that this will be <code>null</code> when
|
* generated. Note that this will be <code>null</code> when
|
||||||
|
@ -93,43 +124,33 @@ public interface IIpsGenerationStrategy {
|
||||||
* be populated for all subsequent calls for a given IPS
|
* be populated for all subsequent calls for a given IPS
|
||||||
* document generation.
|
* document generation.
|
||||||
* @param theResource The resource to massage the resource ID for
|
* @param theResource The resource to massage the resource ID for
|
||||||
* @return An ID to assign to the resource
|
* @return An ID to assign to the resource, or <code>null</code> to leave the existing ID intact,
|
||||||
|
* meaning that the server-assigned IDs will be used in the bundle.
|
||||||
*/
|
*/
|
||||||
|
@Nullable
|
||||||
IIdType massageResourceId(@Nullable IpsContext theIpsContext, @Nonnull IBaseResource theResource);
|
IIdType massageResourceId(@Nullable IpsContext theIpsContext, @Nonnull IBaseResource theResource);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This method can manipulate the {@link SearchParameterMap} that will
|
* Fetches and returns the patient to include in the generated IPS for the given patient ID.
|
||||||
* be used to find candidate resources for the given IPS section. The map will already have
|
|
||||||
* a subject/patient parameter added to it. The map provided in {@literal theSearchParameterMap}
|
|
||||||
* will contain a subject/patient reference, but no other parameters. This method can add other
|
|
||||||
* parameters.
|
|
||||||
* <p>
|
|
||||||
* For example, for a Vital Signs section, the implementation might add a parameter indicating
|
|
||||||
* the parameter <code>category=vital-signs</code>.
|
|
||||||
*
|
*
|
||||||
* @param theIpsSectionContext The context, which indicates the IPS section and the resource type
|
* @throws ResourceNotFoundException If the ID is not known.
|
||||||
* being searched for.
|
|
||||||
* @param theSearchParameterMap The map to manipulate.
|
|
||||||
*/
|
|
||||||
void massageResourceSearch(
|
|
||||||
IpsContext.IpsSectionContext theIpsSectionContext, SearchParameterMap theSearchParameterMap);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return a set of Include directives to be added to the resource search
|
|
||||||
* for resources to include for a given IPS section. These include statements will
|
|
||||||
* be added to the same {@link SearchParameterMap} provided to
|
|
||||||
* {@link #massageResourceSearch(IpsContext.IpsSectionContext, SearchParameterMap)}.
|
|
||||||
* This is a separate method in order to make subclassing easier.
|
|
||||||
*
|
|
||||||
* @param theIpsSectionContext The context, which indicates the IPS section and the resource type
|
|
||||||
* being searched for.
|
|
||||||
*/
|
*/
|
||||||
@Nonnull
|
@Nonnull
|
||||||
Set<Include> provideResourceSearchIncludes(IpsContext.IpsSectionContext theIpsSectionContext);
|
IBaseResource fetchPatient(IIdType thePatientId, RequestDetails theRequestDetails) throws ResourceNotFoundException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This method will be called for each found resource candidate for inclusion in the
|
* Fetches and returns the patient to include in the generated IPS for the given patient identifier.
|
||||||
* IPS document. The strategy can decide whether to include it or not.
|
*
|
||||||
|
* @throws ResourceNotFoundException If the ID is not known.
|
||||||
*/
|
*/
|
||||||
boolean shouldInclude(IpsContext.IpsSectionContext theIpsSectionContext, IBaseResource theCandidate);
|
@Nonnull
|
||||||
|
IBaseResource fetchPatient(TokenParam thePatientIdentifier, RequestDetails theRequestDetails);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method is called once for each generated IPS document, after all other processing is complete. It can
|
||||||
|
* be used by the strategy to make direct manipulations prior to returning the document.
|
||||||
|
*/
|
||||||
|
default void postManipulateIpsBundle(IBaseBundle theBundle) {
|
||||||
|
// nothing
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,39 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server - International Patient Summary (IPS)
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.ips.api;
|
||||||
|
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.instance.model.api.IIdType;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This interface is invoked when a section has no resources found, and should generate
|
||||||
|
* a "stub" resource explaining why. Typically this would be content such as "no information
|
||||||
|
* is available for this section", and might indicate for example that the absence of
|
||||||
|
* AllergyIntolerance resources only indicates that the allergy status is not known, not that
|
||||||
|
* the patient has no allergies.
|
||||||
|
*/
|
||||||
|
public interface INoInfoGenerator {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate an appropriate no-info resource. The resource does not need to have an ID populated,
|
||||||
|
* although it can if it is a resource found in the repository.
|
||||||
|
*/
|
||||||
|
IBaseResource generate(IIdType theSubjectId);
|
||||||
|
}
|
|
@ -0,0 +1,125 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server - International Patient Summary (IPS)
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.ips.api;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import jakarta.annotation.Nonnull;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.thymeleaf.util.Validate;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This interface is invoked for each section of the IPS, and fetches/returns the
|
||||||
|
* resources which will be included in the IPS document for that section. This
|
||||||
|
* might be by performing a search in a local repository, but could also be
|
||||||
|
* done by calling a remote repository, performing a calculation, making
|
||||||
|
* JDBC database calls directly, etc.
|
||||||
|
* <p>
|
||||||
|
* Note that you only need to implement this interface directly if you want to
|
||||||
|
* provide manual logic for gathering and preparing resources to include in
|
||||||
|
* an IPS document. If your resources can be collected by querying a JPS
|
||||||
|
* repository, you can use {@link ca.uhn.fhir.jpa.ips.jpa.JpaSectionResourceSupplier}
|
||||||
|
* as the implementation of this interface, and
|
||||||
|
* {@link ca.uhn.fhir.jpa.ips.jpa.IJpaSectionSearchStrategy} becomes the class
|
||||||
|
* that is used to define your searches.
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @since 7.2.0
|
||||||
|
* @see ca.uhn.fhir.jpa.ips.jpa.JpaSectionResourceSupplier
|
||||||
|
*/
|
||||||
|
public interface ISectionResourceSupplier {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method will be called once for each section context (section and resource type combination),
|
||||||
|
* and will be used to supply the resources to include in the given IPS section. This method can
|
||||||
|
* be used if you wish to fetch resources for a given section from a source other than
|
||||||
|
* the repository. This could mean fetching resources using a FHIR REST client to an
|
||||||
|
* external server, or could even mean fetching data directly from a database using JDBC
|
||||||
|
* or similar.
|
||||||
|
*
|
||||||
|
* @param theIpsContext The IPS context, containing the identity of the patient whose IPS is being generated.
|
||||||
|
* @param theSectionContext The section context, containing the section name and resource type.
|
||||||
|
* @param theRequestDetails The RequestDetails object associated with the HTTP request associated with this generation.
|
||||||
|
* @return Returns a list of resources to add to the given section, or <code>null</code>.
|
||||||
|
*/
|
||||||
|
@Nullable
|
||||||
|
<T extends IBaseResource> List<ResourceEntry> fetchResourcesForSection(
|
||||||
|
IpsContext theIpsContext, IpsSectionContext<T> theSectionContext, RequestDetails theRequestDetails);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This enum specifies how an individual {@link ResourceEntry resource entry} that
|
||||||
|
* is returned by {@link #fetchResourcesForSection(IpsContext, IpsSectionContext, RequestDetails)}
|
||||||
|
* should be included in the resulting IPS document bundle.
|
||||||
|
*/
|
||||||
|
enum InclusionTypeEnum {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The resource should be included in the document bundle and linked to
|
||||||
|
* from the Composition via the <code>Composition.section.entry</code>
|
||||||
|
* reference.
|
||||||
|
*/
|
||||||
|
PRIMARY_RESOURCE,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The resource should be included in the document bundle, but not directly
|
||||||
|
* linked from the composition. This typically means that it is referenced
|
||||||
|
* by at least one primary resource.
|
||||||
|
*/
|
||||||
|
SECONDARY_RESOURCE,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Do not include this resource in the document
|
||||||
|
*/
|
||||||
|
EXCLUDE
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This class is the return type for {@link #fetchResourcesForSection(IpsContext, IpsSectionContext, RequestDetails)}.
|
||||||
|
*/
|
||||||
|
class ResourceEntry {
|
||||||
|
|
||||||
|
private final IBaseResource myResource;
|
||||||
|
|
||||||
|
private final InclusionTypeEnum myInclusionType;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor
|
||||||
|
*
|
||||||
|
* @param theResource The resource to include (must not be null)
|
||||||
|
* @param theInclusionType The inclusion type (must not be null)
|
||||||
|
*/
|
||||||
|
public ResourceEntry(@Nonnull IBaseResource theResource, @Nonnull InclusionTypeEnum theInclusionType) {
|
||||||
|
Validate.notNull(theResource, "theResource must not be null");
|
||||||
|
Validate.notNull(theInclusionType, "theInclusionType must not be null");
|
||||||
|
myResource = theResource;
|
||||||
|
myInclusionType = theInclusionType;
|
||||||
|
}
|
||||||
|
|
||||||
|
public IBaseResource getResource() {
|
||||||
|
return myResource;
|
||||||
|
}
|
||||||
|
|
||||||
|
public InclusionTypeEnum getInclusionType() {
|
||||||
|
return myInclusionType;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -58,28 +58,8 @@ public class IpsContext {
|
||||||
return mySubjectId;
|
return mySubjectId;
|
||||||
}
|
}
|
||||||
|
|
||||||
public IpsSectionContext newSectionContext(IpsSectionEnum theSection, String theResourceType) {
|
public <T extends IBaseResource> IpsSectionContext<T> newSectionContext(
|
||||||
return new IpsSectionContext(mySubject, mySubjectId, theSection, theResourceType);
|
Section theSection, Class<T> theResourceType) {
|
||||||
}
|
return new IpsSectionContext<>(mySubject, mySubjectId, theSection, theResourceType);
|
||||||
|
|
||||||
public static class IpsSectionContext extends IpsContext {
|
|
||||||
|
|
||||||
private final IpsSectionEnum mySection;
|
|
||||||
private final String myResourceType;
|
|
||||||
|
|
||||||
private IpsSectionContext(
|
|
||||||
IBaseResource theSubject, IIdType theSubjectId, IpsSectionEnum theSection, String theResourceType) {
|
|
||||||
super(theSubject, theSubjectId);
|
|
||||||
mySection = theSection;
|
|
||||||
myResourceType = theResourceType;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getResourceType() {
|
|
||||||
return myResourceType;
|
|
||||||
}
|
|
||||||
|
|
||||||
public IpsSectionEnum getSection() {
|
|
||||||
return mySection;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,43 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server - International Patient Summary (IPS)
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.ips.api;
|
||||||
|
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.instance.model.api.IIdType;
|
||||||
|
|
||||||
|
public class IpsSectionContext<T extends IBaseResource> extends IpsContext {
|
||||||
|
|
||||||
|
private final Section mySection;
|
||||||
|
private final Class<T> myResourceType;
|
||||||
|
|
||||||
|
IpsSectionContext(IBaseResource theSubject, IIdType theSubjectId, Section theSection, Class<T> theResourceType) {
|
||||||
|
super(theSubject, theSubjectId);
|
||||||
|
mySection = theSection;
|
||||||
|
myResourceType = theResourceType;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Class<T> getResourceType() {
|
||||||
|
return myResourceType;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Section getSection() {
|
||||||
|
return mySection;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,223 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server - International Patient Summary (IPS)
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.ips.api;
|
||||||
|
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.apache.commons.lang3.Validate;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call {@link #newBuilder()} to create a new instance of this class.
|
||||||
|
*/
|
||||||
|
public class Section {
|
||||||
|
|
||||||
|
private final String myTitle;
|
||||||
|
private final String mySectionCode;
|
||||||
|
private final String mySectionDisplay;
|
||||||
|
private final List<Class<? extends IBaseResource>> myResourceTypes;
|
||||||
|
private final String myProfile;
|
||||||
|
private final INoInfoGenerator myNoInfoGenerator;
|
||||||
|
|
||||||
|
private final String mySectionSystem;
|
||||||
|
|
||||||
|
private Section(
|
||||||
|
String theTitle,
|
||||||
|
String theSectionSystem,
|
||||||
|
String theSectionCode,
|
||||||
|
String theSectionDisplay,
|
||||||
|
List<Class<? extends IBaseResource>> theResourceTypes,
|
||||||
|
String theProfile,
|
||||||
|
INoInfoGenerator theNoInfoGenerator) {
|
||||||
|
myTitle = theTitle;
|
||||||
|
mySectionSystem = theSectionSystem;
|
||||||
|
mySectionCode = theSectionCode;
|
||||||
|
mySectionDisplay = theSectionDisplay;
|
||||||
|
myResourceTypes = List.copyOf(theResourceTypes);
|
||||||
|
myProfile = theProfile;
|
||||||
|
myNoInfoGenerator = theNoInfoGenerator;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Nullable
|
||||||
|
public INoInfoGenerator getNoInfoGenerator() {
|
||||||
|
return myNoInfoGenerator;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<Class<? extends IBaseResource>> getResourceTypes() {
|
||||||
|
return myResourceTypes;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getProfile() {
|
||||||
|
return myProfile;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getTitle() {
|
||||||
|
return myTitle;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getSectionSystem() {
|
||||||
|
return mySectionSystem;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getSectionCode() {
|
||||||
|
return mySectionCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getSectionDisplay() {
|
||||||
|
return mySectionDisplay;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object theO) {
|
||||||
|
if (theO instanceof Section) {
|
||||||
|
Section o = (Section) theO;
|
||||||
|
return StringUtils.equals(myProfile, o.myProfile);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
return myProfile.hashCode();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new empty section builder
|
||||||
|
*/
|
||||||
|
public static SectionBuilder newBuilder() {
|
||||||
|
return new SectionBuilder();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new section builder which is a clone of an existing section
|
||||||
|
*/
|
||||||
|
public static SectionBuilder newBuilder(Section theSection) {
|
||||||
|
return new SectionBuilder(
|
||||||
|
theSection.myTitle,
|
||||||
|
theSection.mySectionSystem,
|
||||||
|
theSection.mySectionCode,
|
||||||
|
theSection.mySectionDisplay,
|
||||||
|
theSection.myProfile,
|
||||||
|
theSection.myNoInfoGenerator,
|
||||||
|
theSection.myResourceTypes);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class SectionBuilder {
|
||||||
|
|
||||||
|
private String myTitle;
|
||||||
|
private String mySectionSystem;
|
||||||
|
private String mySectionCode;
|
||||||
|
private String mySectionDisplay;
|
||||||
|
private List<Class<? extends IBaseResource>> myResourceTypes = new ArrayList<>();
|
||||||
|
private String myProfile;
|
||||||
|
private INoInfoGenerator myNoInfoGenerator;
|
||||||
|
|
||||||
|
private SectionBuilder() {
|
||||||
|
super();
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder(
|
||||||
|
String theTitle,
|
||||||
|
String theSectionSystem,
|
||||||
|
String theSectionCode,
|
||||||
|
String theSectionDisplay,
|
||||||
|
String theProfile,
|
||||||
|
INoInfoGenerator theNoInfoGenerator,
|
||||||
|
List<Class<? extends IBaseResource>> theResourceTypes) {
|
||||||
|
myTitle = theTitle;
|
||||||
|
mySectionSystem = theSectionSystem;
|
||||||
|
mySectionCode = theSectionCode;
|
||||||
|
mySectionDisplay = theSectionDisplay;
|
||||||
|
myNoInfoGenerator = theNoInfoGenerator;
|
||||||
|
myProfile = theProfile;
|
||||||
|
myResourceTypes = new ArrayList<>(theResourceTypes);
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder withTitle(String theTitle) {
|
||||||
|
Validate.notBlank(theTitle);
|
||||||
|
myTitle = theTitle;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder withSectionSystem(String theSectionSystem) {
|
||||||
|
Validate.notBlank(theSectionSystem);
|
||||||
|
mySectionSystem = theSectionSystem;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder withSectionCode(String theSectionCode) {
|
||||||
|
Validate.notBlank(theSectionCode);
|
||||||
|
mySectionCode = theSectionCode;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder withSectionDisplay(String theSectionDisplay) {
|
||||||
|
Validate.notBlank(theSectionDisplay);
|
||||||
|
mySectionDisplay = theSectionDisplay;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method may be called multiple times if the section will contain multiple resource types
|
||||||
|
*/
|
||||||
|
public SectionBuilder withResourceType(Class<? extends IBaseResource> theResourceType) {
|
||||||
|
Validate.notNull(theResourceType, "theResourceType must not be null");
|
||||||
|
Validate.isTrue(!myResourceTypes.contains(theResourceType), "theResourceType has already been added");
|
||||||
|
myResourceTypes.add(theResourceType);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SectionBuilder withProfile(String theProfile) {
|
||||||
|
Validate.notBlank(theProfile);
|
||||||
|
myProfile = theProfile;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Supplies a {@link INoInfoGenerator} which is used to create a stub resource
|
||||||
|
* to place in this section if no actual contents are found. This can be
|
||||||
|
* {@literal null} if you do not want any such stub to be included for this
|
||||||
|
* section.
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("UnusedReturnValue")
|
||||||
|
public SectionBuilder withNoInfoGenerator(@Nullable INoInfoGenerator theNoInfoGenerator) {
|
||||||
|
myNoInfoGenerator = theNoInfoGenerator;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Section build() {
|
||||||
|
Validate.notBlank(mySectionSystem, "No section system has been defined for this section");
|
||||||
|
Validate.notBlank(mySectionCode, "No section code has been defined for this section");
|
||||||
|
Validate.notBlank(mySectionDisplay, "No section display has been defined for this section");
|
||||||
|
|
||||||
|
return new Section(
|
||||||
|
myTitle,
|
||||||
|
mySectionSystem,
|
||||||
|
mySectionCode,
|
||||||
|
mySectionDisplay,
|
||||||
|
myResourceTypes,
|
||||||
|
myProfile,
|
||||||
|
myNoInfoGenerator);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,470 +0,0 @@
|
||||||
/*-
|
|
||||||
* #%L
|
|
||||||
* HAPI FHIR JPA Server - International Patient Summary (IPS)
|
|
||||||
* %%
|
|
||||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
|
||||||
* %%
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
* #L%
|
|
||||||
*/
|
|
||||||
package ca.uhn.fhir.jpa.ips.api;
|
|
||||||
|
|
||||||
import jakarta.annotation.Nullable;
|
|
||||||
import jakarta.annotation.PostConstruct;
|
|
||||||
import org.apache.commons.lang3.Validate;
|
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
|
||||||
import org.hl7.fhir.instance.model.api.IIdType;
|
|
||||||
import org.hl7.fhir.r4.model.AllergyIntolerance;
|
|
||||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
|
||||||
import org.hl7.fhir.r4.model.Coding;
|
|
||||||
import org.hl7.fhir.r4.model.Condition;
|
|
||||||
import org.hl7.fhir.r4.model.MedicationStatement;
|
|
||||||
import org.hl7.fhir.r4.model.Reference;
|
|
||||||
import org.hl7.fhir.r4.model.ResourceType;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.function.Consumer;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This class is the registry for sections for the IPS document. It can be extended
|
|
||||||
* and customized if you wish to add / remove / change sections.
|
|
||||||
* <p>
|
|
||||||
* By default, all standard sections in the
|
|
||||||
* <a href="http://hl7.org/fhir/uv/ips/">base IPS specification IG</a>
|
|
||||||
* are included. You can customize this to remove sections, or to add new ones
|
|
||||||
* as permitted by the IG.
|
|
||||||
* </p>
|
|
||||||
* <p>
|
|
||||||
* To customize the sections, you may override the {@link #addSections()} method
|
|
||||||
* in order to add new sections or remove them. You may also override individual
|
|
||||||
* section methods such as {@link #addSectionAllergyIntolerance()} or
|
|
||||||
* {@link #addSectionAdvanceDirectives()}.
|
|
||||||
* </p>
|
|
||||||
*/
|
|
||||||
public class SectionRegistry {
|
|
||||||
|
|
||||||
private final ArrayList<Section> mySections = new ArrayList<>();
|
|
||||||
private List<Consumer<SectionBuilder>> myGlobalCustomizers = new ArrayList<>();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Constructor
|
|
||||||
*/
|
|
||||||
public SectionRegistry() {
|
|
||||||
super();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This method should be automatically called by the Spring context. It initializes
|
|
||||||
* the registry.
|
|
||||||
*/
|
|
||||||
@PostConstruct
|
|
||||||
public final void initialize() {
|
|
||||||
Validate.isTrue(mySections.isEmpty(), "Sections are already initialized");
|
|
||||||
addSections();
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean isInitialized() {
|
|
||||||
return !mySections.isEmpty();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add the various sections to the registry in order. This method can be overridden for
|
|
||||||
* customization.
|
|
||||||
*/
|
|
||||||
protected void addSections() {
|
|
||||||
addSectionAllergyIntolerance();
|
|
||||||
addSectionMedicationSummary();
|
|
||||||
addSectionProblemList();
|
|
||||||
addSectionImmunizations();
|
|
||||||
addSectionProcedures();
|
|
||||||
addSectionMedicalDevices();
|
|
||||||
addSectionDiagnosticResults();
|
|
||||||
addSectionVitalSigns();
|
|
||||||
addSectionPregnancy();
|
|
||||||
addSectionSocialHistory();
|
|
||||||
addSectionIllnessHistory();
|
|
||||||
addSectionFunctionalStatus();
|
|
||||||
addSectionPlanOfCare();
|
|
||||||
addSectionAdvanceDirectives();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionAllergyIntolerance() {
|
|
||||||
addSection(IpsSectionEnum.ALLERGY_INTOLERANCE)
|
|
||||||
.withTitle("Allergies and Intolerances")
|
|
||||||
.withSectionCode("48765-2")
|
|
||||||
.withSectionDisplay("Allergies and adverse reactions Document")
|
|
||||||
.withResourceTypes(ResourceType.AllergyIntolerance.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionAllergies")
|
|
||||||
.withNoInfoGenerator(new AllergyIntoleranceNoInfoR4Generator())
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionMedicationSummary() {
|
|
||||||
addSection(IpsSectionEnum.MEDICATION_SUMMARY)
|
|
||||||
.withTitle("Medication List")
|
|
||||||
.withSectionCode("10160-0")
|
|
||||||
.withSectionDisplay("History of Medication use Narrative")
|
|
||||||
.withResourceTypes(
|
|
||||||
ResourceType.MedicationStatement.name(),
|
|
||||||
ResourceType.MedicationRequest.name(),
|
|
||||||
ResourceType.MedicationAdministration.name(),
|
|
||||||
ResourceType.MedicationDispense.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionMedications")
|
|
||||||
.withNoInfoGenerator(new MedicationNoInfoR4Generator())
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionProblemList() {
|
|
||||||
addSection(IpsSectionEnum.PROBLEM_LIST)
|
|
||||||
.withTitle("Problem List")
|
|
||||||
.withSectionCode("11450-4")
|
|
||||||
.withSectionDisplay("Problem list - Reported")
|
|
||||||
.withResourceTypes(ResourceType.Condition.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionProblems")
|
|
||||||
.withNoInfoGenerator(new ProblemNoInfoR4Generator())
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionImmunizations() {
|
|
||||||
addSection(IpsSectionEnum.IMMUNIZATIONS)
|
|
||||||
.withTitle("History of Immunizations")
|
|
||||||
.withSectionCode("11369-6")
|
|
||||||
.withSectionDisplay("History of Immunization Narrative")
|
|
||||||
.withResourceTypes(ResourceType.Immunization.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionImmunizations")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionProcedures() {
|
|
||||||
addSection(IpsSectionEnum.PROCEDURES)
|
|
||||||
.withTitle("History of Procedures")
|
|
||||||
.withSectionCode("47519-4")
|
|
||||||
.withSectionDisplay("History of Procedures Document")
|
|
||||||
.withResourceTypes(ResourceType.Procedure.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionProceduresHx")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionMedicalDevices() {
|
|
||||||
addSection(IpsSectionEnum.MEDICAL_DEVICES)
|
|
||||||
.withTitle("Medical Devices")
|
|
||||||
.withSectionCode("46264-8")
|
|
||||||
.withSectionDisplay("History of medical device use")
|
|
||||||
.withResourceTypes(ResourceType.DeviceUseStatement.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionMedicalDevices")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionDiagnosticResults() {
|
|
||||||
addSection(IpsSectionEnum.DIAGNOSTIC_RESULTS)
|
|
||||||
.withTitle("Diagnostic Results")
|
|
||||||
.withSectionCode("30954-2")
|
|
||||||
.withSectionDisplay("Relevant diagnostic tests/laboratory data Narrative")
|
|
||||||
.withResourceTypes(ResourceType.DiagnosticReport.name(), ResourceType.Observation.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionResults")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionVitalSigns() {
|
|
||||||
addSection(IpsSectionEnum.VITAL_SIGNS)
|
|
||||||
.withTitle("Vital Signs")
|
|
||||||
.withSectionCode("8716-3")
|
|
||||||
.withSectionDisplay("Vital signs")
|
|
||||||
.withResourceTypes(ResourceType.Observation.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionVitalSigns")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionPregnancy() {
|
|
||||||
addSection(IpsSectionEnum.PREGNANCY)
|
|
||||||
.withTitle("Pregnancy Information")
|
|
||||||
.withSectionCode("10162-6")
|
|
||||||
.withSectionDisplay("History of pregnancies Narrative")
|
|
||||||
.withResourceTypes(ResourceType.Observation.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionPregnancyHx")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionSocialHistory() {
|
|
||||||
addSection(IpsSectionEnum.SOCIAL_HISTORY)
|
|
||||||
.withTitle("Social History")
|
|
||||||
.withSectionCode("29762-2")
|
|
||||||
.withSectionDisplay("Social history Narrative")
|
|
||||||
.withResourceTypes(ResourceType.Observation.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionSocialHistory")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionIllnessHistory() {
|
|
||||||
addSection(IpsSectionEnum.ILLNESS_HISTORY)
|
|
||||||
.withTitle("History of Past Illness")
|
|
||||||
.withSectionCode("11348-0")
|
|
||||||
.withSectionDisplay("History of Past illness Narrative")
|
|
||||||
.withResourceTypes(ResourceType.Condition.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionPastIllnessHx")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionFunctionalStatus() {
|
|
||||||
addSection(IpsSectionEnum.FUNCTIONAL_STATUS)
|
|
||||||
.withTitle("Functional Status")
|
|
||||||
.withSectionCode("47420-5")
|
|
||||||
.withSectionDisplay("Functional status assessment note")
|
|
||||||
.withResourceTypes(ResourceType.ClinicalImpression.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionFunctionalStatus")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionPlanOfCare() {
|
|
||||||
addSection(IpsSectionEnum.PLAN_OF_CARE)
|
|
||||||
.withTitle("Plan of Care")
|
|
||||||
.withSectionCode("18776-5")
|
|
||||||
.withSectionDisplay("Plan of care note")
|
|
||||||
.withResourceTypes(ResourceType.CarePlan.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionPlanOfCare")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void addSectionAdvanceDirectives() {
|
|
||||||
addSection(IpsSectionEnum.ADVANCE_DIRECTIVES)
|
|
||||||
.withTitle("Advance Directives")
|
|
||||||
.withSectionCode("42348-3")
|
|
||||||
.withSectionDisplay("Advance directives")
|
|
||||||
.withResourceTypes(ResourceType.Consent.name())
|
|
||||||
.withProfile(
|
|
||||||
"https://hl7.org/fhir/uv/ips/StructureDefinition-Composition-uv-ips-definitions.html#Composition.section:sectionAdvanceDirectives")
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
private SectionBuilder addSection(IpsSectionEnum theSectionEnum) {
|
|
||||||
return new SectionBuilder(theSectionEnum);
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionRegistry addGlobalCustomizer(Consumer<SectionBuilder> theGlobalCustomizer) {
|
|
||||||
Validate.notNull(theGlobalCustomizer, "theGlobalCustomizer must not be null");
|
|
||||||
myGlobalCustomizers.add(theGlobalCustomizer);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Section> getSections() {
|
|
||||||
Validate.isTrue(isInitialized(), "Section registry has not been initialized");
|
|
||||||
return Collections.unmodifiableList(mySections);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Section getSection(IpsSectionEnum theSectionEnum) {
|
|
||||||
return getSections().stream()
|
|
||||||
.filter(t -> t.getSectionEnum() == theSectionEnum)
|
|
||||||
.findFirst()
|
|
||||||
.orElseThrow(() -> new IllegalArgumentException("No section for type: " + theSectionEnum));
|
|
||||||
}
|
|
||||||
|
|
||||||
	/**
	 * Strategy interface used to produce a placeholder "no information available"
	 * resource for a section that would otherwise be empty.
	 */
	public interface INoInfoGenerator {

		/**
		 * Generate an appropriate no-info resource. The resource does not need to have an ID populated,
		 * although it can if it is a resource found in the repository.
		 *
		 * @param theSubjectId the ID of the subject (patient) the placeholder resource refers to
		 * @return the placeholder resource to include in the section narrative
		 */
		IBaseResource generate(IIdType theSubjectId);
	}
|
|
||||||
|
|
||||||
public class SectionBuilder {
|
|
||||||
|
|
||||||
private final IpsSectionEnum mySectionEnum;
|
|
||||||
private String myTitle;
|
|
||||||
private String mySectionCode;
|
|
||||||
private String mySectionDisplay;
|
|
||||||
private List<String> myResourceTypes;
|
|
||||||
private String myProfile;
|
|
||||||
private INoInfoGenerator myNoInfoGenerator;
|
|
||||||
|
|
||||||
public SectionBuilder(IpsSectionEnum theSectionEnum) {
|
|
||||||
mySectionEnum = theSectionEnum;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withTitle(String theTitle) {
|
|
||||||
Validate.notBlank(theTitle);
|
|
||||||
myTitle = theTitle;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withSectionCode(String theSectionCode) {
|
|
||||||
Validate.notBlank(theSectionCode);
|
|
||||||
mySectionCode = theSectionCode;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withSectionDisplay(String theSectionDisplay) {
|
|
||||||
Validate.notBlank(theSectionDisplay);
|
|
||||||
mySectionDisplay = theSectionDisplay;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withResourceTypes(String... theResourceTypes) {
|
|
||||||
Validate.isTrue(theResourceTypes.length > 0);
|
|
||||||
myResourceTypes = Arrays.asList(theResourceTypes);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withProfile(String theProfile) {
|
|
||||||
Validate.notBlank(theProfile);
|
|
||||||
myProfile = theProfile;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SectionBuilder withNoInfoGenerator(INoInfoGenerator theNoInfoGenerator) {
|
|
||||||
myNoInfoGenerator = theNoInfoGenerator;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void build() {
|
|
||||||
myGlobalCustomizers.forEach(t -> t.accept(this));
|
|
||||||
mySections.add(new Section(
|
|
||||||
mySectionEnum,
|
|
||||||
myTitle,
|
|
||||||
mySectionCode,
|
|
||||||
mySectionDisplay,
|
|
||||||
myResourceTypes,
|
|
||||||
myProfile,
|
|
||||||
myNoInfoGenerator));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static class AllergyIntoleranceNoInfoR4Generator implements INoInfoGenerator {
|
|
||||||
@Override
|
|
||||||
public IBaseResource generate(IIdType theSubjectId) {
|
|
||||||
AllergyIntolerance allergy = new AllergyIntolerance();
|
|
||||||
allergy.setCode(new CodeableConcept()
|
|
||||||
.addCoding(new Coding()
|
|
||||||
.setCode("no-allergy-info")
|
|
||||||
.setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips")
|
|
||||||
.setDisplay("No information about allergies")))
|
|
||||||
.setPatient(new Reference(theSubjectId))
|
|
||||||
.setClinicalStatus(new CodeableConcept()
|
|
||||||
.addCoding(new Coding()
|
|
||||||
.setCode("active")
|
|
||||||
.setSystem("http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical")));
|
|
||||||
return allergy;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static class MedicationNoInfoR4Generator implements INoInfoGenerator {
|
|
||||||
@Override
|
|
||||||
public IBaseResource generate(IIdType theSubjectId) {
|
|
||||||
MedicationStatement medication = new MedicationStatement();
|
|
||||||
// setMedicationCodeableConcept is not available
|
|
||||||
medication
|
|
||||||
.setMedication(new CodeableConcept()
|
|
||||||
.addCoding(new Coding()
|
|
||||||
.setCode("no-medication-info")
|
|
||||||
.setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips")
|
|
||||||
.setDisplay("No information about medications")))
|
|
||||||
.setSubject(new Reference(theSubjectId))
|
|
||||||
.setStatus(MedicationStatement.MedicationStatementStatus.UNKNOWN);
|
|
||||||
// .setEffective(new
|
|
||||||
// Period().addExtension().setUrl("http://hl7.org/fhir/StructureDefinition/data-absent-reason").setValue((new Coding().setCode("not-applicable"))))
|
|
||||||
return medication;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static class ProblemNoInfoR4Generator implements INoInfoGenerator {
|
|
||||||
@Override
|
|
||||||
public IBaseResource generate(IIdType theSubjectId) {
|
|
||||||
Condition condition = new Condition();
|
|
||||||
condition
|
|
||||||
.setCode(new CodeableConcept()
|
|
||||||
.addCoding(new Coding()
|
|
||||||
.setCode("no-problem-info")
|
|
||||||
.setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips")
|
|
||||||
.setDisplay("No information about problems")))
|
|
||||||
.setSubject(new Reference(theSubjectId))
|
|
||||||
.setClinicalStatus(new CodeableConcept()
|
|
||||||
.addCoding(new Coding()
|
|
||||||
.setCode("active")
|
|
||||||
.setSystem("http://terminology.hl7.org/CodeSystem/condition-clinical")));
|
|
||||||
return condition;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class Section {
|
|
||||||
|
|
||||||
private final IpsSectionEnum mySectionEnum;
|
|
||||||
private final String myTitle;
|
|
||||||
private final String mySectionCode;
|
|
||||||
private final String mySectionDisplay;
|
|
||||||
private final List<String> myResourceTypes;
|
|
||||||
private final String myProfile;
|
|
||||||
private final INoInfoGenerator myNoInfoGenerator;
|
|
||||||
|
|
||||||
public Section(
|
|
||||||
IpsSectionEnum theSectionEnum,
|
|
||||||
String theTitle,
|
|
||||||
String theSectionCode,
|
|
||||||
String theSectionDisplay,
|
|
||||||
List<String> theResourceTypes,
|
|
||||||
String theProfile,
|
|
||||||
INoInfoGenerator theNoInfoGenerator) {
|
|
||||||
mySectionEnum = theSectionEnum;
|
|
||||||
myTitle = theTitle;
|
|
||||||
mySectionCode = theSectionCode;
|
|
||||||
mySectionDisplay = theSectionDisplay;
|
|
||||||
myResourceTypes = Collections.unmodifiableList(new ArrayList<>(theResourceTypes));
|
|
||||||
myProfile = theProfile;
|
|
||||||
myNoInfoGenerator = theNoInfoGenerator;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Nullable
|
|
||||||
public INoInfoGenerator getNoInfoGenerator() {
|
|
||||||
return myNoInfoGenerator;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<String> getResourceTypes() {
|
|
||||||
return myResourceTypes;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getProfile() {
|
|
||||||
return myProfile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public IpsSectionEnum getSectionEnum() {
|
|
||||||
return mySectionEnum;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getTitle() {
|
|
||||||
return myTitle;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getSectionCode() {
|
|
||||||
return mySectionCode;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getSectionDisplay() {
|
|
||||||
return mySectionDisplay;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue