Merge branch 'master' into 2515_MDM_survivorship_rules
This commit is contained in:
commit 1f10a15c35

@@ -31,6 +31,7 @@ charset = utf-8
indent_style = tab
tab_width = 3
indent_size = 3
continuation_indent_size=3
ij_java_align_consecutive_assignments = false
ij_java_align_consecutive_variable_declarations = false
ij_java_align_group_field_declarations = false

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -38,6 +38,7 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;

@@ -601,6 +602,11 @@ public abstract class BaseRuntimeElementCompositeDefinition<T extends IBase> ext
public boolean isFirstFieldInNewClass() {
return myFirstFieldInNewClass;
}

@Override
public String toString() {
return myField.getName();
}
}

}

@@ -902,21 +902,21 @@ public class FhirContext {
}

private BaseRuntimeElementDefinition<?> scanDatatype(final Class<? extends IElement> theResourceType) {
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<Class<? extends IElement>>();
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<>();
resourceTypes.add(theResourceType);
Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes);
return defs.get(theResourceType);
}

private RuntimeResourceDefinition scanResourceType(final Class<? extends IBaseResource> theResourceType) {
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<Class<? extends IElement>>();
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<>();
resourceTypes.add(theResourceType);
Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes);
return (RuntimeResourceDefinition) defs.get(theResourceType);
}

private synchronized Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> scanResourceTypes(final Collection<Class<? extends IElement>> theResourceTypes) {
List<Class<? extends IBase>> typesToScan = new ArrayList<Class<? extends IBase>>();
List<Class<? extends IBase>> typesToScan = new ArrayList<>();
if (theResourceTypes != null) {
typesToScan.addAll(theResourceTypes);
}

@@ -394,41 +394,30 @@ class ModelScanner {
b.append(" provides compartment membership but is not of type 'reference'");
ourLog.warn(b.toString());
continue;
// throw new ConfigurationException(b.toString());
}
providesMembershipInCompartments.add(next.name());
}

List<RuntimeSearchParam.Component> components = null;
if (paramType == RestSearchParameterTypeEnum.COMPOSITE) {
compositeFields.put(nextField, searchParam);
continue;
components = new ArrayList<>();
for (String next : searchParam.compositeOf()) {
String ref = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + next;
components.add(new RuntimeSearchParam.Component(null, ref));
}
}

Collection<String> base = Collections.singletonList(theResourceDef.getName());
RuntimeSearchParam param = new RuntimeSearchParam(null, null, searchParam.name(), searchParam.description(), searchParam.path(), paramType, null, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, base);
String url = null;
if (theResourceDef.isStandardType()) {
url = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + searchParam.name();
}
RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, false, components, base);
theResourceDef.addSearchParam(param);
nameToParam.put(param.getName(), param);
}
}

for (Entry<Field, SearchParamDefinition> nextEntry : compositeFields.entrySet()) {
SearchParamDefinition searchParam = nextEntry.getValue();

List<RuntimeSearchParam> compositeOf = new ArrayList<>();
for (String nextName : searchParam.compositeOf()) {
RuntimeSearchParam param = nameToParam.get(nextName);
if (param == null) {
ourLog.warn("Search parameter {}.{} declares that it is a composite with compositeOf value '{}' but that is not a valid parameter name itself. Valid values are: {}",
theResourceDef.getName(), searchParam.name(), nextName, nameToParam.keySet());
continue;
}
compositeOf.add(param);
}

RuntimeSearchParam param = new RuntimeSearchParam(null, null, searchParam.name(), searchParam.description(), searchParam.path(), RestSearchParameterTypeEnum.COMPOSITE, compositeOf, null, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE);
theResourceDef.addSearchParam(param);
}
}

private Set<String> toTargetList(Class<? extends IBaseResource>[] theTarget) {

@@ -187,6 +187,11 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini
for (RuntimeSearchParam next : searchParams) {
if (next.getProvidesMembershipInCompartments() != null) {
for (String nextCompartment : next.getProvidesMembershipInCompartments()) {

if (nextCompartment.startsWith("Base FHIR compartment definition for ")) {
nextCompartment = nextCompartment.substring("Base FHIR compartment definition for ".length());
}

if (!compartmentNameToSearchParams.containsKey(nextCompartment)) {
compartmentNameToSearchParams.put(nextCompartment, new ArrayList<>());
}

@@ -46,7 +46,6 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class RuntimeSearchParam {
private final IIdType myId;
private final Set<String> myBase;
private final List<RuntimeSearchParam> myCompositeOf;
private final String myDescription;
private final String myName;
private final RestSearchParameterTypeEnum myParamType;

@@ -56,21 +55,29 @@ public class RuntimeSearchParam {
private final RuntimeSearchParamStatusEnum myStatus;
private final String myUri;
private final Map<String, List<IBaseExtension<?, ?>>> myExtensions = new HashMap<>();
private final boolean myUnique;
private final List<Component> myComponents;
private IPhoneticEncoder myPhoneticEncoder;

/**
* Constructor
*/
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List<RuntimeSearchParam> theCompositeOf,
Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus) {
this(theId, theUri, theName, theDescription, thePath, theParamType, theCompositeOf, theProvidesMembershipInCompartments, theTargets, theStatus, null);
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType,
Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus, Collection<String> theBase) {
this(theId, theUri, theName, theDescription, thePath, theParamType, theProvidesMembershipInCompartments, theTargets, theStatus, false, Collections.emptyList(), theBase);
}

/**
* Copy constructor
*/
public RuntimeSearchParam(RuntimeSearchParam theSp) {
this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.isUnique(), theSp.getComponents(), theSp.getBase());
}

/**
* Constructor
*/
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List<RuntimeSearchParam> theCompositeOf,
Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus, Collection<String> theBase) {
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus, boolean theUnique, List<Component> theComponents, Collection<String> theBase) {
super();

myId = theId;

@@ -79,7 +86,6 @@ public class RuntimeSearchParam {
myDescription = theDescription;
myPath = thePath;
myParamType = theParamType;
myCompositeOf = theCompositeOf;
myStatus = theStatus;
if (theProvidesMembershipInCompartments != null && !theProvidesMembershipInCompartments.isEmpty()) {
myProvidesMembershipInCompartments = Collections.unmodifiableSet(theProvidesMembershipInCompartments);

@@ -104,20 +110,20 @@ public class RuntimeSearchParam {
} else {
myBase = Collections.unmodifiableSet(new HashSet<>(theBase));
}
myUnique = theUnique;
if (theComponents != null) {
myComponents = Collections.unmodifiableList(theComponents);
} else {
myComponents = Collections.emptyList();
}
}

/**
* Constructor
*/
public RuntimeSearchParam(String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus) {
this(null, null, theName, theDescription, thePath, theParamType, null, theProvidesMembershipInCompartments, theTargets, theStatus);
public List<Component> getComponents() {
return myComponents;
}

/**
* Copy constructor
*/
public RuntimeSearchParam(RuntimeSearchParam theSp) {
this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getCompositeOf(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.getBase());
public boolean isUnique() {
return myUnique;
}

/**

@@ -205,10 +211,6 @@ public class RuntimeSearchParam {
return myStatus;
}

public List<RuntimeSearchParam> getCompositeOf() {
return myCompositeOf;
}

public String getDescription() {
return myDescription;
}

@@ -247,13 +249,6 @@ public class RuntimeSearchParam {
return myProvidesMembershipInCompartments;
}

public enum RuntimeSearchParamStatusEnum {
ACTIVE,
DRAFT,
RETIRED,
UNKNOWN
}

public RuntimeSearchParam setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) {
myPhoneticEncoder = thePhoneticEncoder;
return this;

@@ -265,4 +260,42 @@ public class RuntimeSearchParam {
}
return myPhoneticEncoder.encode(theString);
}

public enum RuntimeSearchParamStatusEnum {
ACTIVE,
DRAFT,
RETIRED,
UNKNOWN
}

public static class Component {
private final String myExpression;
private final String myReference;

/**
* Constructor
*/
public Component(String theExpression, String theReference) {
myExpression = theExpression;
myReference = theReference;

}

@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("expression", myExpression)
.append("reference", myReference)
.toString();
}

public String getExpression() {
return myExpression;
}

public String getReference() {
return myReference;
}
}

}

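The new `RuntimeSearchParam.Component` type replaces the old `compositeOf` list as the way composite search parameters describe their parts. A minimal sketch of how a component is built, mirroring the ModelScanner call site above (the canonical URL is an illustrative value following the standard HL7 pattern):

```java
import ca.uhn.fhir.context.RuntimeSearchParam;

class ComponentSketch {
   public static void main(String[] args) {
      // A component points at another SearchParameter by canonical URL; the
      // FHIRPath expression may be null when it is not known at scan time.
      RuntimeSearchParam.Component component = new RuntimeSearchParam.Component(
            null, "http://hl7.org/fhir/SearchParameter/observation-code");
      System.out.println(component.getReference()); // prints the canonical URL
      System.out.println(component);                // uses the new toString() above
   }
}
```
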
@@ -0,0 +1,38 @@
package ca.uhn.fhir.context.phonetic;

/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import com.google.common.base.CharMatcher;

// Useful for numerical identifiers like phone numbers, address parts etc.
// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like that.
public class NumericEncoder implements IPhoneticEncoder {
@Override
public String name() {
return "NUMERIC";
}

@Override
public String encode(String theString) {
// Remove everything but the numbers
return CharMatcher.inRange('0', '9').retainFrom(theString);
}
}

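Since `encode` simply retains the characters '0' through '9', the behaviour is easy to illustrate. A quick sketch with invented input values:

```java
import ca.uhn.fhir.context.phonetic.NumericEncoder;

class NumericEncoderSketch {
   public static void main(String[] args) {
      NumericEncoder encoder = new NumericEncoder();
      System.out.println(encoder.name());                       // NUMERIC
      System.out.println(encoder.encode("(416) 967-1111"));     // 4169671111 - punctuation and spaces removed
      System.out.println(encoder.encode("123 Main St Apt 4B")); // 1234 - everything but the digits is dropped
   }
}
```
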
@@ -39,7 +39,8 @@ public enum PhoneticEncoderEnum {
METAPHONE(new ApacheEncoder("METAPHONE", new Metaphone())),
NYSIIS(new ApacheEncoder("NYSIIS", new Nysiis())),
REFINED_SOUNDEX(new ApacheEncoder("REFINED_SOUNDEX", new RefinedSoundex())),
SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex()));
SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex())),
NUMERIC(new NumericEncoder());

private final IPhoneticEncoder myPhoneticEncoder;

@@ -32,6 +32,17 @@ public class ValueSetExpansionOptions {
private boolean myFailOnMissingCodeSystem = true;
private int myCount = 1000;
private int myOffset = 0;
private boolean myIncludeHierarchy;
private String myFilter;

public String getFilter() {
return myFilter;
}

public ValueSetExpansionOptions setFilter(String theFilter) {
myFilter = theFilter;
return this;
}

/**
* The number of codes to return.

@@ -94,6 +105,14 @@ public class ValueSetExpansionOptions {
return this;
}

public boolean isIncludeHierarchy() {
return myIncludeHierarchy;
}

public void setIncludeHierarchy(boolean theIncludeHierarchy) {
myIncludeHierarchy = theIncludeHierarchy;
}

public static ValueSetExpansionOptions forOffsetAndCount(int theOffset, int theCount) {
return new ValueSetExpansionOptions()
.setOffset(theOffset)

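Taken together with the pre-existing offset/count support, the new filter and hierarchy switches combine as in this sketch (option names come straight from the accessors above; the filter text is invented):

```java
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;

class ExpansionOptionsSketch {
   public static void main(String[] args) {
      // Request the first 100 codes matching a text filter, with the
      // concept hierarchy included in the expansion response.
      ValueSetExpansionOptions options = ValueSetExpansionOptions
            .forOffsetAndCount(0, 100)
            .setFilter("card");          // returns this, so it chains
      options.setIncludeHierarchy(true); // returns void, so set it separately
   }
}
```
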
@@ -1202,6 +1202,9 @@ public enum Pointcut implements IPointcut {
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* <li>
* ca.uhn.fhir.jpa.searchparam.SearchParameterMap - Contains the details of the search being checked. This can be modified.
* </li>
* </ul>
* <p>
* Hooks should return <code>void</code>.

@@ -1210,7 +1213,8 @@ public enum Pointcut implements IPointcut {
STORAGE_PRESEARCH_REGISTERED(void.class,
"ca.uhn.fhir.rest.server.util.ICachedSearchDetails",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.searchparam.SearchParameterMap"
),

/**

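With `SearchParameterMap` added to the pointcut's parameter list, an interceptor can now adjust a search before it executes. A minimal sketch under stated assumptions: the status-narrowing rule is invented for illustration, and HAPI hook methods may declare a subset of the pointcut's legal parameters:

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;

@Interceptor
public class PreSearchNarrowingInterceptor {

   @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
   public void narrowSearch(RequestDetails theRequestDetails, SearchParameterMap theParams) {
      // The map can be modified in place before the search executes,
      // e.g. constraining every search to active resources (hypothetical rule).
      theParams.add("status", new TokenParam("active"));
   }
}
```
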
@@ -49,13 +49,13 @@ public @interface Extension {
* by regional authorities or jurisdictional governments)
* </p>
*/
boolean definedLocally();
boolean definedLocally() default true;

/**
* Returns <code>true</code> if this extension is a <a
* href="http://www.hl7.org/implement/standards/fhir/extensibility.html#modifierExtension">modifier extension</a>
*/
boolean isModifier();
boolean isModifier() default false;

/**
* The URL associated with this extension

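With these defaults in place, a custom extension field only needs to declare its URL; `definedLocally` and `isModifier` can be omitted. A sketch against a hypothetical custom patient class (names and URL invented):

```java
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.api.annotation.Extension;
import ca.uhn.fhir.model.api.annotation.ResourceDef;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;

@ResourceDef(name = "Patient")
public class MyPatient extends Patient {

   // definedLocally=true and isModifier=false are now implied by the defaults.
   @Child(name = "eyeColour")
   @Extension(url = "http://example.com/ext/eye-colour")
   @Description(shortDefinition = "The colour of the patient's eyes")
   private StringType myEyeColour;
}
```
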
@@ -32,7 +32,7 @@ public interface INarrativeTemplate {

Set<String> getAppliesToResourceTypes();

Set<Class<? extends IBase>> getAppliesToResourceClasses();
Set<Class<? extends IBase>> getAppliesToClasses();

TemplateTypeEnum getTemplateType();

@@ -34,7 +34,7 @@ public class NarrativeTemplate implements INarrativeTemplate {
private Set<String> myAppliesToProfiles = new HashSet<>();
private Set<String> myAppliesToResourceTypes = new HashSet<>();
private Set<String> myAppliesToDataTypes = new HashSet<>();
private Set<Class<? extends IBase>> myAppliesToResourceClasses = new HashSet<>();
private Set<Class<? extends IBase>> myAppliesToClasses = new HashSet<>();
private TemplateTypeEnum myTemplateType = TemplateTypeEnum.THYMELEAF;
private String myContextPath;
private String myTemplateName;

@@ -79,12 +79,12 @@ public class NarrativeTemplate implements INarrativeTemplate {
}

@Override
public Set<Class<? extends IBase>> getAppliesToResourceClasses() {
return Collections.unmodifiableSet(myAppliesToResourceClasses);
public Set<Class<? extends IBase>> getAppliesToClasses() {
return Collections.unmodifiableSet(myAppliesToClasses);
}

void addAppliesToResourceClass(Class<? extends IBase> theAppliesToResourceClass) {
myAppliesToResourceClasses.add(theAppliesToResourceClass);
void addAppliesToClass(Class<? extends IBase> theAppliesToClass) {
myAppliesToClasses.add(theAppliesToClass);
}

@Override

@@ -118,4 +118,5 @@ public class NarrativeTemplate implements INarrativeTemplate {
void addAppliesToDatatype(String theDataType) {
myAppliesToDataTypes.add(theDataType);
}

}

@@ -23,6 +23,7 @@ package ca.uhn.fhir.narrative2;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

@@ -32,8 +33,20 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.util.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -41,15 +54,17 @@
public class NarrativeTemplateManifest implements INarrativeTemplateManifest {
private static final Logger ourLog = LoggerFactory.getLogger(NarrativeTemplateManifest.class);

private final Map<String, List<NarrativeTemplate>> myStyleToResourceTypeToTemplate;
private final Map<String, List<NarrativeTemplate>> myStyleToDatatypeToTemplate;
private final Map<String, List<NarrativeTemplate>> myStyleToNameToTemplate;
private final Map<String, List<NarrativeTemplate>> myResourceTypeToTemplate;
private final Map<String, List<NarrativeTemplate>> myDatatypeToTemplate;
private final Map<String, List<NarrativeTemplate>> myNameToTemplate;
private final Map<String, List<NarrativeTemplate>> myClassToTemplate;
private final int myTemplateCount;

private NarrativeTemplateManifest(Collection<NarrativeTemplate> theTemplates) {
Map<String, List<NarrativeTemplate>> resourceTypeToTemplate = new HashMap<>();
Map<String, List<NarrativeTemplate>> datatypeToTemplate = new HashMap<>();
Map<String, List<NarrativeTemplate>> nameToTemplate = new HashMap<>();
Map<String, List<NarrativeTemplate>> classToTemplate = new HashMap<>();

for (NarrativeTemplate nextTemplate : theTemplates) {
nameToTemplate.computeIfAbsent(nextTemplate.getTemplateName(), t -> new ArrayList<>()).add(nextTemplate);

@@ -59,12 +74,16 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {
for (String nextDataType : nextTemplate.getAppliesToDataTypes()) {
datatypeToTemplate.computeIfAbsent(nextDataType.toUpperCase(), t -> new ArrayList<>()).add(nextTemplate);
}
for (Class<? extends IBase> nextAppliesToClass : nextTemplate.getAppliesToClasses()) {
classToTemplate.computeIfAbsent(nextAppliesToClass.getName(), t -> new ArrayList<>()).add(nextTemplate);
}
}

myTemplateCount = theTemplates.size();
myStyleToNameToTemplate = makeImmutable(nameToTemplate);
myStyleToResourceTypeToTemplate = makeImmutable(resourceTypeToTemplate);
myStyleToDatatypeToTemplate = makeImmutable(datatypeToTemplate);
myClassToTemplate = makeImmutable(classToTemplate);
myNameToTemplate = makeImmutable(nameToTemplate);
myResourceTypeToTemplate = makeImmutable(resourceTypeToTemplate);
myDatatypeToTemplate = makeImmutable(datatypeToTemplate);
}

public int getNamedTemplateCount() {

@@ -73,23 +92,27 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {

@Override
public List<INarrativeTemplate> getTemplateByResourceName(FhirContext theFhirContext, EnumSet<TemplateTypeEnum> theStyles, String theResourceName) {
return getFromMap(theStyles, theResourceName.toUpperCase(), myStyleToResourceTypeToTemplate);
return getFromMap(theStyles, theResourceName.toUpperCase(), myResourceTypeToTemplate);
}

@Override
public List<INarrativeTemplate> getTemplateByName(FhirContext theFhirContext, EnumSet<TemplateTypeEnum> theStyles, String theName) {
return getFromMap(theStyles, theName, myStyleToNameToTemplate);
return getFromMap(theStyles, theName, myNameToTemplate);
}

@Override
public List<INarrativeTemplate> getTemplateByElement(FhirContext theFhirContext, EnumSet<TemplateTypeEnum> theStyles, IBase theElement) {
if (theElement instanceof IBaseResource) {
String resourceName = theFhirContext.getResourceDefinition((IBaseResource) theElement).getName();
return getTemplateByResourceName(theFhirContext, theStyles, resourceName);
} else {
String datatypeName = theFhirContext.getElementDefinition(theElement.getClass()).getName();
return getFromMap(theStyles, datatypeName.toUpperCase(), myStyleToDatatypeToTemplate);
List<INarrativeTemplate> retVal = getFromMap(theStyles, theElement.getClass().getName(), myClassToTemplate);
if (retVal.isEmpty()) {
if (theElement instanceof IBaseResource) {
String resourceName = theFhirContext.getResourceDefinition((IBaseResource) theElement).getName();
retVal = getTemplateByResourceName(theFhirContext, theStyles, resourceName);
} else {
String datatypeName = theFhirContext.getElementDefinition(theElement.getClass()).getName();
retVal = getFromMap(theStyles, datatypeName.toUpperCase(), myDatatypeToTemplate);
}
}
return retVal;
}

public static NarrativeTemplateManifest forManifestFileLocation(String... thePropertyFilePaths) throws IOException {

@@ -134,9 +157,16 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {

NarrativeTemplate nextTemplate = nameToTemplate.computeIfAbsent(name, t -> new NarrativeTemplate().setTemplateName(name));

Validate.isTrue(!nextKey.endsWith(".class"), "Narrative manifest does not support specifying templates by class name - Use \"[name].resourceType=[resourceType]\" instead");

if (nextKey.endsWith(".profile")) {
if (nextKey.endsWith(".class")) {
String className = file.getProperty(nextKey);
if (isNotBlank(className)) {
try {
nextTemplate.addAppliesToClass((Class<? extends IBase>) Class.forName(className));
} catch (ClassNotFoundException theE) {
throw new InternalErrorException("Could not find class " + className + " declared in narrative manifest");
}
}
} else if (nextKey.endsWith(".profile")) {
String profile = file.getProperty(nextKey);
if (isNotBlank(profile)) {
nextTemplate.addAppliesToProfile(profile);

@@ -144,17 +174,17 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {
} else if (nextKey.endsWith(".resourceType")) {
String resourceType = file.getProperty(nextKey);
Arrays
.stream(resourceType.split(","))
.map(t -> t.trim())
.filter(t -> isNotBlank(t))
.forEach(t -> nextTemplate.addAppliesToResourceType(t));
} else if (nextKey.endsWith(".dataType")) {
String dataType = file.getProperty(nextKey);
Arrays
.stream(dataType.split(","))
.map(t -> t.trim())
.filter(t -> isNotBlank(t))
.forEach(t -> nextTemplate.addAppliesToDatatype(t));
} else if (nextKey.endsWith(".style")) {
String templateTypeName = file.getProperty(nextKey).toUpperCase();
TemplateTypeEnum templateType = TemplateTypeEnum.valueOf(templateTypeName);

@@ -171,9 +201,9 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {
} else if (nextKey.endsWith(".title")) {
ourLog.debug("Ignoring title property as narrative generator no longer generates titles: {}", nextKey);
} else {
throw new ConfigurationException("Invalid property name: " + nextKey
+ " - the key must end in one of the expected extensions "
+ "'.profile', '.resourceType', '.dataType', '.style', '.contextPath', '.narrative', '.title'");
}

}

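In manifest terms, the previously rejected `.class` key is now accepted, so a template can target any custom structure directly. A sketch under stated assumptions (the property names, class, and paths are invented for illustration):

```java
import ca.uhn.fhir.narrative2.NarrativeTemplateManifest;
import java.io.IOException;

class ManifestSketch {
   public static void main(String[] args) throws IOException {
      // Hypothetical narrative-manifest.properties entries using the new ".class" key:
      //   myExtension.class=com.example.model.MyEyeColourExtension
      //   myExtension.narrative=classpath:com/example/narrative/MyEyeColourExtension.html
      NarrativeTemplateManifest manifest = NarrativeTemplateManifest
            .forManifestFileLocation("classpath:com/example/narrative/narrative-manifest.properties");
   }
}
```
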
@@ -210,10 +240,10 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest {

private static <T> List<INarrativeTemplate> getFromMap(EnumSet<TemplateTypeEnum> theStyles, T theKey, Map<T, List<NarrativeTemplate>> theMap) {
return theMap
.getOrDefault(theKey, Collections.emptyList())
.stream()
.filter(t->theStyles.contains(t.getTemplateType()))
.collect(Collectors.toList());
.getOrDefault(theKey, Collections.emptyList())
.stream()
.filter(t -> theStyles.contains(t.getTemplateType()))
.collect(Collectors.toList());
}

private static <T> Map<T, List<NarrativeTemplate>> makeImmutable(Map<T, List<NarrativeTemplate>> theStyleToResourceTypeToTemplate) {

@@ -980,7 +980,12 @@ public abstract class BaseParser implements IParser {
myEncodeContext = theEncodeContext;
}

private void addParent(CompositeChildElement theParent, StringBuilder theB) {
@Override
public String toString() {
return myDef.getElementName();
}

private void addParent(CompositeChildElement theParent, StringBuilder theB) {
if (theParent != null) {
if (theParent.myResDef != null) {
theB.append(theParent.myResDef.getName());

@@ -379,7 +379,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
}

boolean haveWrittenExtensions = false;
for (CompositeChildElement nextChildElem : super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext)) {
Iterable<CompositeChildElement> compositeChildElements = super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext);
for (CompositeChildElement nextChildElem : compositeChildElements) {

BaseRuntimeChildDefinition nextChild = nextChildElem.getDef();

@@ -300,7 +300,7 @@ public class RDFParser extends BaseParser {

String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement);
if (element != null) {
XSDDatatype dataType = getXSDDataTypeForFhirType(element.fhirType());
XSDDatatype dataType = getXSDDataTypeForFhirType(element.fhirType(), encodedValue);
rdfResource.addProperty(rdfModel.createProperty(propertyName), this.createFhirValueBlankNode(rdfModel, encodedValue, dataType, cardinalityIndex));
}
}

@@ -314,7 +314,7 @@ public class RDFParser extends BaseParser {
if (value != null || !hasNoExtensions(pd)) {
if (value != null) {
String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement);
XSDDatatype dataType = getXSDDataTypeForFhirType(pd.fhirType());
XSDDatatype dataType = getXSDDataTypeForFhirType(pd.fhirType(), value);
Resource valueResource = this.createFhirValueBlankNode(rdfModel, value, dataType, cardinalityIndex);
if (!hasNoExtensions(pd)) {
IBaseHasExtensions hasExtension = (IBaseHasExtensions)pd;

@@ -411,7 +411,7 @@ public class RDFParser extends BaseParser {
* @param fhirType hapi field type
* @return XSDDatatype value
*/
private XSDDatatype getXSDDataTypeForFhirType(String fhirType) {
private XSDDatatype getXSDDataTypeForFhirType(String fhirType, String value) {
switch (fhirType) {
case "boolean":
return XSDDatatype.XSDboolean;

@@ -423,7 +423,16 @@ public class RDFParser extends BaseParser {
return XSDDatatype.XSDdate;
case "dateTime":
case "instant":
return XSDDatatype.XSDdateTime;
switch (value.length()) { // assumes valid lexical value
case 4:
return XSDDatatype.XSDgYear;
case 7:
return XSDDatatype.XSDgYearMonth;
case 10:
return XSDDatatype.XSDdate;
default:
return XSDDatatype.XSDdateTime;
}
case "code":
case "string":
default:

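The length-based dispatch keys off the lexical form of the date value. Assuming valid FHIR date/dateTime values, the mapping works out as in this self-contained sketch (the example values are illustrative):

```java
import org.apache.jena.datatypes.xsd.XSDDatatype;

class DateTypeMappingSketch {
   // Mirrors the switch above: the lexical length of a valid value
   // determines how precise an XSD type can be assigned.
   static XSDDatatype forDateTimeValue(String theValue) {
      switch (theValue.length()) {
         case 4:  return XSDDatatype.XSDgYear;      // "2021"
         case 7:  return XSDDatatype.XSDgYearMonth; // "2021-04"
         case 10: return XSDDatatype.XSDdate;       // "2021-04-26"
         default: return XSDDatatype.XSDdateTime;   // "2021-04-26T10:00:00Z"
      }
   }
}
```
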
@@ -205,6 +205,7 @@ public class Constants {
public static final String PARAMQUALIFIER_STRING_CONTAINS = ":contains";
public static final String PARAMQUALIFIER_STRING_EXACT = ":exact";
public static final String PARAMQUALIFIER_TOKEN_TEXT = ":text";
public static final String PARAMQUALIFIER_MDM = ":mdm";
public static final int STATUS_HTTP_200_OK = 200;
public static final int STATUS_HTTP_201_CREATED = 201;
public static final int STATUS_HTTP_204_NO_CONTENT = 204;

@@ -1,5 +1,19 @@
package ca.uhn.fhir.rest.param;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IQueryParameterOr;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.IntegerDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;

@@ -8,25 +22,6 @@ import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterOr;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.IntegerDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.param.binder.QueryParameterAndBinder;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.UrlUtil;

/*
* #%L
* HAPI FHIR - Core Library

@@ -59,126 +54,6 @@ public class ParameterUtil {
return (T) value;
}

/**
* This is a utility method intended provided to help the JPA module.
*/
public static IQueryParameterAnd<?> parseQueryParams(FhirContext theContext, RestSearchParameterTypeEnum paramType,
String theUnqualifiedParamName, List<QualifiedParamList> theParameters) {
QueryParameterAndBinder binder;
switch (paramType) {
case COMPOSITE:
throw new UnsupportedOperationException();
case DATE:
binder = new QueryParameterAndBinder(DateAndListParam.class,
Collections.emptyList());
break;
case NUMBER:
binder = new QueryParameterAndBinder(NumberAndListParam.class,
Collections.emptyList());
break;
case QUANTITY:
binder = new QueryParameterAndBinder(QuantityAndListParam.class,
Collections.emptyList());
break;
case REFERENCE:
binder = new QueryParameterAndBinder(ReferenceAndListParam.class,
Collections.emptyList());
break;
case STRING:
binder = new QueryParameterAndBinder(StringAndListParam.class,
Collections.emptyList());
break;
case TOKEN:
binder = new QueryParameterAndBinder(TokenAndListParam.class,
Collections.emptyList());
break;
case URI:
binder = new QueryParameterAndBinder(UriAndListParam.class,
Collections.emptyList());
break;
case HAS:
binder = new QueryParameterAndBinder(HasAndListParam.class,
Collections.emptyList());
break;
case SPECIAL:
binder = new QueryParameterAndBinder(SpecialAndListParam.class,
Collections.emptyList());
break;
default:
throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType + " which is currently not supported.");
}

return binder.parse(theContext, theUnqualifiedParamName, theParameters);
}

/**
* This is a utility method intended provided to help the JPA module.
*/
public static IQueryParameterAnd<?> parseQueryParams(FhirContext theContext, RuntimeSearchParam theParamDef,
String theUnqualifiedParamName, List<QualifiedParamList> theParameters) {

RestSearchParameterTypeEnum paramType = theParamDef.getParamType();

if (paramType == RestSearchParameterTypeEnum.COMPOSITE) {

List<RuntimeSearchParam> theCompositList = theParamDef.getCompositeOf();

if (theCompositList == null) {
throw new ConfigurationException("Search parameter of type " + theUnqualifiedParamName
+ " can be found in parameter annotation, found ");
}

if (theCompositList.size() != 2) {
throw new ConfigurationException("Search parameter of type " + theUnqualifiedParamName
+ " must have 2 composite types declared in parameter annotation, found "
+ theCompositList.size());
}

RuntimeSearchParam left = theCompositList.get(0);
RuntimeSearchParam right = theCompositList.get(1);

@SuppressWarnings({ "unchecked", "rawtypes" })
CompositeAndListParam<IQueryParameterType, IQueryParameterType> cp = new CompositeAndListParam(
getCompositBindingClass(left.getParamType(), left.getName()),
getCompositBindingClass(right.getParamType(), right.getName()));

cp.setValuesAsQueryTokens(theContext, theUnqualifiedParamName, theParameters);

return cp;
} else {
return parseQueryParams(theContext, paramType, theUnqualifiedParamName, theParameters);
}
}

private static Class<?> getCompositBindingClass(RestSearchParameterTypeEnum paramType,
String theUnqualifiedParamName) {

switch (paramType) {
case DATE:
return DateParam.class;
case NUMBER:
return NumberParam.class;
case QUANTITY:
return QuantityParam.class;
case REFERENCE:
return ReferenceParam.class;
case STRING:
return StringParam.class;
case TOKEN:
return TokenParam.class;
case URI:
return UriParam.class;
case HAS:
return HasParam.class;
case SPECIAL:
return SpecialParam.class;

default:
throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType
+ " which is currently not supported.");
}
}

/**
* Removes :modifiers and .chains from URL parameter names
*/

@@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.param;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.CoverageIgnore;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

@@ -41,6 +42,7 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
private String myBaseUrl;
private String myValue;
private String myIdPart;
private Boolean myMdmExpand;

/**
* Constructor

@@ -90,8 +92,8 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
}
}

@Override
String doGetQueryParameterQualifier() {

private String defaultGetQueryParameterQualifier() {
StringBuilder b = new StringBuilder();
if (isNotBlank(myChain)) {
if (isNotBlank(getResourceType())) {

@@ -106,6 +108,10 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
}
return null;
}
@Override
String doGetQueryParameterQualifier() {
return this.myMdmExpand != null ? ":mdm" : defaultGetQueryParameterQualifier();
}

@Override
String doGetValueAsQueryToken(FhirContext theContext) {

@@ -121,6 +127,11 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/

@Override
void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) {
if (Constants.PARAMQUALIFIER_MDM.equals(theQualifier)) {
myMdmExpand = true;
theQualifier = "";
}

String q = theQualifier;
if (isNotBlank(q)) {
if (q.startsWith(":")) {

@@ -166,6 +177,14 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
return myBaseUrl;
}

public boolean isMdmExpand() {
return myMdmExpand != null && myMdmExpand;
}

public ReferenceParam setMdmExpand(boolean theMdmExpand) {
myMdmExpand = theMdmExpand;
return this;
}

public String getChain() {
return myChain;

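Putting the pieces together, the `:mdm` qualifier can be supplied over the wire or set programmatically. A sketch with invented resource IDs:

```java
import ca.uhn.fhir.rest.param.ReferenceParam;

class MdmExpandSketch {
   public static void main(String[] args) {
      // Programmatic form: ask the server to expand the reference across
      // MDM-linked resources before matching.
      ReferenceParam subject = new ReferenceParam("Patient/123").setMdmExpand(true);
      System.out.println(subject.isMdmExpand()); // true

      // The equivalent HTTP form uses the new qualifier:
      //   GET [base]/Observation?subject:mdm=Patient/123
   }
}
```
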
@@ -156,7 +156,8 @@ public class BundleBuilder {

// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue());
String resourceType = myContext.getResourceType(theResource);
url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
myEntryRequestUrlChild.getMutator().setValue(request, url);

// Bundle.entry.request.url

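The practical effect is that a transaction entry's `request.url` is now type-qualified. A sketch assuming an R4 context and an invented patient ID:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Patient;

class BundleUrlSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      BundleBuilder builder = new BundleBuilder(ctx);

      Patient patient = new Patient();
      patient.setId("Patient/123");

      // Bundle.entry.request.url is now rendered as "Patient/123" rather than "123".
      builder.addTransactionUpdateEntry(patient);
      IBaseBundle bundle = builder.getBundle();
   }
}
```
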
@@ -68,7 +68,9 @@ public enum VersionEnum {
V5_2_0,
V5_2_1,
V5_3_0,
V5_4_0;
V5_3_2,
V5_4_0,
;

public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values();

@@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri

# JPA Messages

ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.

@@ -11,14 +11,7 @@
<tr th:each="issue : ${resource.issue}">
<td th:text="${issue.severityElement.value}" style="font-weight: bold;"></td>
<td th:text="${issue.location}"></td>
<th:block th:switch="${fhirVersion}">
<th:block th:case="'DSTU1'">
<td><pre th:text="${issue.details}"/></td>
</th:block>
<th:block th:case="*">
<td><pre th:text="${issue.diagnostics}"/></td>
</th:block>
</th:block>
<td><pre th:text="${issue.diagnostics}"/></td>
</tr>
</table>
</div>

@@ -1,14 +1,14 @@
package ca.uhn.fhir.context.phonetic;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;

class PhoneticEncoderTest {
private static final Logger ourLog = LoggerFactory.getLogger(PhoneticEncoderTest.class);

@@ -23,7 +23,11 @@ class PhoneticEncoderTest {
public void testEncodeAddress(PhoneticEncoderEnum thePhoneticEncoderEnum) {
String encoded = thePhoneticEncoderEnum.getPhoneticEncoder().encode(ADDRESS_LINE);
ourLog.info("{}: {}", thePhoneticEncoderEnum.name(), encoded);
assertThat(encoded, startsWith(NUMBER + " "));
assertThat(encoded, endsWith(" " + SUITE));
if (thePhoneticEncoderEnum == PhoneticEncoderEnum.NUMERIC) {
assertEquals(NUMBER + SUITE, encoded);
} else {
assertThat(encoded, startsWith(NUMBER + " "));
assertThat(encoded, endsWith(" " + SUITE));
}
}
}

@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

@@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider;
import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -78,13 +78,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-subscription</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>

@@ -101,7 +101,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
<version>5.4.0-PRE3-SNAPSHOT</version>
<version>5.4.0-PRE6-SNAPSHOT</version>
<classifier>classes</classifier>
</dependency>
<dependency>

@@ -2,6 +2,14 @@
type: add
issue: 2449
title: "Adds interceptors for the following functionality:
* Data normalization (n11n) - removing unwanted characters (control, etc. as defined by the requirements)
* Data standardization (s13n) - normalizing data by ensuring word spacing and character cases are uniform
* Data validation - making sure that addresses / emails are validated"
<ul>
<li>
Data normalization (n11n) - removing unwanted characters (control, etc. as defined by the requirements)
</li>
<li>
Data standardization (s13n) - normalizing data by ensuring word spacing and character cases are uniform
</li>
<li>
Data validation - making sure that addresses / emails are validated
</li>
</ul>"

@@ -0,0 +1,5 @@
---
type: fix
issue: 2493
title: "A database deadlock in Postgresql was observed when uploading large terminology CodeSystems using
deferred uploading. Thanks to Tyge Folke Nielsen for reporting and suggesting a fix!"

@@ -0,0 +1,5 @@
---
type: add
issue: 2520
title: "Add support for `:mdm` search parameter qualifier on reference search parameters. Details about enabling this feature
can be found [in the documentation](/hapi-fhir/docs/server_jpa_mdm/mdm_expansion.html)."

@@ -0,0 +1,6 @@
---
type: add
issue: 2523
title: "A new Validation Support Module has been added called UnknownCodeSystemWarningValidationSupport. This module
allows validation to produce a warning but not an error if a code being validated references
an unknown code system."

@@ -0,0 +1,6 @@
---
type: add
issue: 2525
title: "A new optional parameter has been added to the `ValueSet/$expand` operation. When provided a value of `true`, the
operation will include the concept hierarchy in the expansion response."

@@ -0,0 +1,5 @@
---
type: fix
issue: 2528
title: "An issue with compartment definitions in R5 models was fixed. This issue caused some authorization
rules to reject valid requests. Thanks to Patrick Palacin for reporting!"

@@ -0,0 +1,7 @@
---
type: fix
issue: 2533
title: "When issuing a request for a specific Resource and also specifying an _include param,
the referenced resource is not returned when there is only 1 version of the referenced resource available.
When there are more than 1 versions available, the referenced resource is returned in the response bundle."
backport: 5.3.2

@@ -0,0 +1,6 @@
---
type: fix
issue: 2535
title: "An issue with package installer involving logical StructureDefinition resources was fixed. Package registry will no
longer attempt to generate a snapshot for logical StructureDefinition resources if one is not already provided in the
resource definition."

@@ -0,0 +1,5 @@
---
type: add
issue: 2537
title: "It is now possible to create narrative generator templates that apply to any
custom structures including custom extension structures."

@@ -0,0 +1,9 @@
---
type: fix
issue: 2543
title: "When issuing a request for a specific Resource and also specifying an _include param,
the proper historical referenced resource is not returned when there are more than 1 versions of the
referenced resource available, after the reference has been changed from the original version 1 to some other version.
When there are more than 1 versions available, and the referring resource had previously referred to version 1
but now refers to version 4, the resource returned in the response bundle is for version 1."
backport: 5.3.2

@@ -0,0 +1,5 @@
---
type: add
issue: 2547
title: "Added new NUMERIC mdm matcher for matching phone numbers. Also added NUMERIC phonetic encoder to support
adding NUMERIC encoded search parameter (e.g. if searching for matching phone numbers is required by mdm candidate searching)."

@@ -0,0 +1,4 @@
---
type: fix
issue: 2556
title: "Fixed a bug which would cause Bulk Export to fail when run in a partitioned environment."

@@ -0,0 +1,26 @@
---
- item:
  type: "add"
  title: "The version of a few dependencies have been bumped to the latest versions
  (dependent HAPI modules listed in brackets):
  <ul>
  <li>Commons-Lang3 (Core): 3.9 -> 3.12.0</li>
  <li>Commons-Text (Core): 1.7 -> 1.9</li>
  <li>Commons-Codec (Core): 1.14 -> 1.15</li>
  <li>Commons-IO (Core): 2.6 -> 2.8.0</li>
  <li>Guava (Core): 30.1-jre -> 30.1.1-jre</li>
  <li>Jackson (Core): 2.12.1 -> 2.12.3</li>
  <li>Woodstox (Core): 6.2.3 -> 6.2.5</li>
  <li>Gson (JPA): 2.8.5 -> 2.8.6</li>
  <li>Caffeine (JPA): 2.7.0 -> 3.0.1</li>
  <li>Hibernate (JPA): 5.4.26.Final -> 5.4.30.Final</li>
  <li>Hibernate Search (JPA): 6.0.0.Final -> 6.0.2.Final</li>
  <li>Spring (JPA): 5.3.3 -> 5.3.6</li>
  <li>Spring Batch (JPA): 4.2.3.RELEASE -> 4.3.2</li>
  <li>Spring Data (JPA): 2.4.2 -> 2.4.7</li>
  <li>Commons DBCP2 (JPA): 2.7.0 -> 2.8.0</li>
  <li>Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE</li>
  <li>JAnsi (CLI): 2.1.1 -> 2.3.2</li>
  <li>JArchivelib (CLI): 1.0.0 -> 1.1.0</li>
  </ul>
  "

@ -67,6 +67,7 @@ page.server_jpa_mdm.mdm_rules=MDM Rules
|
|||
page.server_jpa_mdm.mdm_eid=MDM Enterprise Identifiers
|
||||
page.server_jpa_mdm.mdm_operations=MDM Operations
|
||||
page.server_jpa_mdm.mdm_details=MDM Technical Details
|
||||
page.server_jpa_mdm.mdm_expansion=MDM Search Expansion
|
||||
|
||||
section.server_jpa_partitioning.title=JPA Server: Partitioning and Multitenancy
|
||||
page.server_jpa_partitioning.partitioning=Partitioning and Multitenancy
|
||||
|
|
|
@ -117,7 +117,7 @@ If you wish to override this behaviour and supply a static CapabilityStatement,
|
|||
* [StaticCapabilityStatementInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.html)
|
||||
* [StaticCapabilityStatementInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java)
|
||||
|
||||
The following example shows how to register the ExceptionHandlingInterceptor.
|
||||
The following example shows how to register the StaticCapabilityStatementInterceptor.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ServletExamples.java|staticCapabilityStatementInterceptor}}
|
||||
|
@ -299,7 +299,7 @@ The RepositoryValidatingInterceptor can be used to enforce validation rules on d
|
|||
|
||||
# Data Standardization
|
||||
|
||||
`StandardizingInterceptor` handles data standardization (s13n) requirements. This interceptor applies standardization rules on all FHIR primitives as configured in the `s13n.json` file that should be made available on the classpath. This file contains FHIRPath definitions together with the standardizers that should be applied to that path. It comes with six per-build standardizers: NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE and TEXT. Custom standardizers can be developed by implementing `ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer` interface.
|
||||
`StandardizingInterceptor` handles data standardization (s13n) requirements. This interceptor applies standardization rules on all FHIR primitives as configured in the `s13n.json` file that should be made available on the classpath. This file contains FHIRPath definitions together with the standardizers that should be applied to that path. It comes with six pre-built standardizers: NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE and TEXT. Custom standardizers can be developed by implementing the `ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer` interface.
|
||||
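For example, a custom standardizer might look like the following minimal sketch (the class below is illustrative, and assumes the `IStandardizer` contract is a single `standardize(String)` method):

```java
package com.example.s13n;

import ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer;

// Illustrative standardizer that collapses runs of whitespace and trims
// the value before it is stored.
public class WhitespaceStandardizer implements IStandardizer {

	@Override
	public String standardize(String theValue) {
		if (theValue == null) {
			return null;
		}
		return theValue.trim().replaceAll("\\s+", " ");
	}
}
```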
|
||||
A sample configuration file can be found below:
|
||||
|
||||
|
|
|
@ -7,12 +7,115 @@ This process is described on the [Profiles & Extensions](./profiles_and_exte
|
|||
There are situations, however, when you might want to create an entirely custom resource type. This feature should be used only if there is no other option, since it means you are creating a resource type that will not be interoperable with other FHIR implementations.
|
||||
|
||||
<p class="doc_info_bubble">
|
||||
This is an advanced features and isn't needed for most uses of HAPI-FHIR. Feel free to skip this page.
|
||||
This is an advanced feature and isn't needed for most uses of HAPI FHIR. Feel free to skip this page. For a simpler way of interacting with resource extensions, see <a href="./profiles_and_extensions.html">Profiles &amp; Extensions</a>.
|
||||
</p>
|
||||
|
||||
|
||||
# Extending FHIR Resource Classes
|
||||
|
||||
The most elegant way of adding extensions to a resource is through the use of custom fields. The following example shows a custom type which extends the FHIR Patient resource definition through two extensions.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatient.java|patientDef}}
|
||||
```
|
||||
|
||||
Using this custom type is as simple as instantiating the type and working with the new fields.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientUse}}
|
||||
```
|
||||
|
||||
This example produces the following output:
|
||||
|
||||
```xml
|
||||
<Patient xmlns="http://hl7.org/fhir">
|
||||
<modifierExtension url="http://example.com/dontuse#importantDates">
|
||||
<valueDateTime value="2010-01-02"/>
|
||||
</modifierExtension>
|
||||
<modifierExtension url="http://example.com/dontuse#importantDates">
|
||||
<valueDateTime value="2014-01-26T11:11:11"/>
|
||||
</modifierExtension>
|
||||
<extension url="http://example.com/dontuse#petname">
|
||||
<valueString value="Fido"/>
|
||||
</extension>
|
||||
<name>
|
||||
<family value="Smith"/>
|
||||
<given value="John"/>
|
||||
<given value="Quincy"/>
|
||||
<suffix value="Jr"/>
|
||||
</name>
|
||||
</Patient>
|
||||
```
|
||||
|
||||
Parsing messages using your new custom type is equally simple. These types can also be used as method return types in clients and servers.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientParse}}
|
||||
```
|
||||
|
||||
# Using Custom Types in a Client
|
||||
|
||||
If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a built-in HAPI type.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}}
|
||||
```
|
||||
|
||||
You may also explicitly use custom types in searches and other operations which return resources.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch}}
|
||||
```
|
||||
|
||||
You can also explicitly declare a preferred response resource custom type. This is useful for some operations that do not otherwise declare their resource types in the method signature.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch2}}
|
||||
```
|
||||
|
||||
## Using Multiple Custom Types in a Client
|
||||
|
||||
Sometimes you may not know in advance exactly which type you will be receiving. For example, if Patient resources on a server conform to several different profiles and you aren't sure which profile you will get back for a specific read, you can declare the "primary" type for a given profile.
|
||||
|
||||
This is declared at the FhirContext level, and will apply to any clients created from this context (including clients created before the default was set).
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientDeclared}}
|
||||
```
|
||||
# Using Custom Types in a Server
|
||||
|
||||
If you are implementing a server and wish to use a specific custom structure, you may simply use the custom structure as you would a built-in HAPI type.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}}
|
||||
```
|
||||
|
||||
# Custom Composite Extension Classes
|
||||
|
||||
The following example shows a resource containing a composite extension.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java|resource}}
|
||||
```
|
||||
|
||||
This could be used to create a resource such as the following:
|
||||
|
||||
```xml
|
||||
<Patient xmlns="http://hl7.org/fhir">
|
||||
<id value="123"/>
|
||||
<extension url="http://acme.org/fooParent">
|
||||
<extension url="http://acme.org/fooChildA">
|
||||
<valueString value="ValueA"/>
|
||||
</extension>
|
||||
<extension url="http://acme.org/fooChildB">
|
||||
<valueString value="ValueB"/>
|
||||
</extension>
|
||||
</extension>
|
||||
</Patient>
|
||||
```
|
||||
|
||||
# Custom Resource Structure
|
||||
|
||||
The following example shows a custom resource structure class:
|
||||
The following example shows a custom resource structure class creating an entirely new resource type, as opposed to simply extending an existing one. Note that this is allowable in FHIR but is **highly discouraged**, as custom resource types are by definition not interoperable.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java|resource}}
|
||||
|
|
|
@ -63,22 +63,32 @@ Then create a properties file which describes your templates. In this properties
|
|||
The first (name.class) defines the class name of the resource to define a template for. The second (name.narrative) defines the path/classpath to the template file. The format of this path is `file:/path/foo.html` or `classpath:/com/classpath/foo.html`.
|
||||
|
||||
```properties
|
||||
# Two property lines in the file per template
|
||||
# Two property lines in the file per template. There are several forms you
|
||||
# can use. This first form assigns a template type to a resource by
|
||||
# resource name
|
||||
practitioner.resourceType=Practitioner
|
||||
practitioner.narrative=file:src/test/resources/narrative/Practitioner.html
|
||||
practitioner.narrative=classpath:com/example/narrative/Practitioner.html
|
||||
|
||||
observation.class=ca.uhn.fhir.model.dstu.resource.Observation
|
||||
observation.narrative=file:src/test/resources/narrative/Observation.html
|
||||
# This second form assigns a template by class name. This can be used for
|
||||
# HAPI FHIR built-in structures, or for custom structures as well.
|
||||
observation.class=org.hl7.fhir.r4.model.Observation
|
||||
observation.narrative=classpath:com/example/narrative/Observation.html
|
||||
|
||||
# etc...
|
||||
# You can also assign a template based on profile ID (Resource.meta.profile)
|
||||
vitalsigns.profile=http://hl7.org/fhir/StructureDefinition/vitalsigns
|
||||
vitalsigns.narrative=classpath:com/example/narrative/Observation_Vitals.html
|
||||
```
|
||||
|
||||
You may also override/define behaviour for datatypes. These datatype narrative definitions will be used as content within <code>th:narrative</code> blocks in resource templates. See the example resource template above for an example.
|
||||
You may also override/define behaviour for datatypes and other structures. These datatype narrative definitions will be used as content within <code>th:narrative</code> blocks in resource templates. See the example resource template above for an example.
|
||||
|
||||
```properties
|
||||
# datatypes use the same format as resources
|
||||
humanname.resourceType=HumanNameDt
|
||||
humanname.narrative=classpath:ca/uhn/fhir/narrative/HumanNameDt.html
|
||||
# You can create a template based on a type name
|
||||
quantity.dataType=Quantity
|
||||
quantity.narrative=classpath:com/example/narrative/Quantity.html
|
||||
|
||||
# Or by class name, which can be useful for custom datatypes and structures
|
||||
custom_extension.class=com.example.model.MyCustomExtension
|
||||
custom_extension.narrative=classpath:com/example/narrative/CustomExtension.html
|
||||
```
|
||||
|
||||
Finally, use the [CustomThymeleafNarrativeGenerator](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.html) and provide it to the FhirContext.
|
||||
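For example, wiring the generator into a context might look like the following sketch (the properties path shown is a hypothetical placeholder):

```java
// FhirContext is in ca.uhn.fhir.context; the generator is in ca.uhn.fhir.narrative
FhirContext ctx = FhirContext.forR4();

// Point the generator at the properties file that describes your templates.
// The classpath location below is a hypothetical example.
CustomThymeleafNarrativeGenerator generator =
	new CustomThymeleafNarrativeGenerator("classpath:com/example/narrative/narratives.properties");

ctx.setNarrativeGenerator(generator);
```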
|
|
|
@ -70,105 +70,8 @@ HAPI provides a few ways of accessing extension values in resources which are re
|
|||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|parseExtension}}
|
||||
```
|
||||
|
||||
# Custom Resource Types
|
||||
# Custom Resource Structures
|
||||
|
||||
The most elegant way of adding extensions to a resource is through the use of custom fields. The following example shows a custom type which extends the FHIR Patient resource definition through two extensions.
|
||||
All of the examples on this page show how to work with the existing data model classes.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatient.java|patientDef}}
|
||||
```
|
||||
|
||||
Using this custom type is as simple as instantiating the type and working with the new fields.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientUse}}
|
||||
```
|
||||
|
||||
This example produces the following output:
|
||||
|
||||
```xml
|
||||
<Patient xmlns="http://hl7.org/fhir">
|
||||
<modifierExtension url="http://example.com/dontuse#importantDates">
|
||||
<valueDateTime value="2010-01-02"/>
|
||||
</modifierExtension>
|
||||
<modifierExtension url="http://example.com/dontuse#importantDates">
|
||||
<valueDateTime value="2014-01-26T11:11:11"/>
|
||||
</modifierExtension>
|
||||
<extension url="http://example.com/dontuse#petname">
|
||||
<valueString value="Fido"/>
|
||||
</extension>
|
||||
<name>
|
||||
<family value="Smith"/>
|
||||
<given value="John"/>
|
||||
<given value="Quincy"/>
|
||||
<suffix value="Jr"/>
|
||||
</name>
|
||||
</Patient>
|
||||
```
|
||||
|
||||
Parsing messages using your new custom type is equally simple. These types can also be used as method return types in clients and servers.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyPatientUse.java|patientParse}}
|
||||
```
|
||||
|
||||
## Using Custom Types in a Client
|
||||
|
||||
If you are using a client and wish to use a specific custom structure, you may simply use the custom structure as you would a built-in HAPI type.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}}
|
||||
```
|
||||
|
||||
You may also explicitly use custom types in searches and other operations which return resources.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch}}
|
||||
```
|
||||
|
||||
You can also explicitly declare a preferred response resource custom type. This is useful for some operations that do not otherwise declare their resource types in the method signature.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSearch2}}
|
||||
```
|
||||
|
||||
## Using Multiple Custom Types in a Client
|
||||
|
||||
Sometimes you may not know in advance exactly which type you will be receiving. For example, if Patient resources on a server conform to several different profiles and you aren't sure which profile you will get back for a specific read, you can declare the "primary" type for a given profile.
|
||||
|
||||
This is declared at the FhirContext level, and will apply to any clients created from this context (including clients created before the default was set).
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientDeclared}}
|
||||
```
|
||||
## Using Custom Types in a Server
|
||||
|
||||
If you are implementing a server and wish to use a specific custom structure, you may simply use the custom structure as you would a built-in HAPI type.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java|customTypeClientSimple}}
|
||||
```
|
||||
|
||||
## Custom Type Examples: Composite Extensions
|
||||
|
||||
The following example shows a resource containing a composite extension.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java|resource}}
|
||||
```
|
||||
|
||||
This could be used to create a resource such as the following:
|
||||
|
||||
```xml
|
||||
<Patient xmlns="http://hl7.org/fhir">
|
||||
<id value="123"/>
|
||||
<extension url="http://acme.org/fooParent">
|
||||
<extension url="http://acme.org/fooChildA">
|
||||
<valueString value="ValueA"/>
|
||||
</extension>
|
||||
<extension url="http://acme.org/fooChildB">
|
||||
<valueString value="ValueB"/>
|
||||
</extension>
|
||||
</extension>
|
||||
</Patient>
|
||||
```
|
||||
This is a great way to work with extensions, and most HAPI FHIR applications use the techniques described on this page. However, there is a more advanced technique available as well, involving the creation of custom Java classes that extend the built-in classes to add statically bound extensions (as opposed to the dynamically bound ones shown on this page). See [Custom Structures](./custom_structures.html) for more information.
|
||||
|
|
|
@ -0,0 +1,33 @@
|
|||
# MDM Expansion
|
||||
|
||||
Once you have MDM enabled and have many linked resources, it can be useful to search across all linked resources. Let's say you have the following MDM links in your database:
|
||||
```bash
|
||||
Patient/1 --> Patient/3
|
||||
Patient/2 --> Patient/3
|
||||
```
|
||||
This indicates that both Patient/1 and Patient/2 are MDM-matched to the same golden resource (Patient/3).
|
||||
What if you want to get all observations from Patient/1, but also include any observations from all of their linked resources? You could do this by first querying the [$mdm-query-links](/docs/server_jpa_mdm/mdm_operations.html) endpoint, and then making a subsequent call like the following:
|
||||
```http request
|
||||
GET http://example.com:8000/Observation?subject=Patient/1,Patient/2,Patient/3
|
||||
```
|
||||
|
||||
But HAPI-FHIR allows a shorthand for this, by means of a Search Parameter qualifier, as follows:
|
||||
```http request
|
||||
GET http://example.com:8000/Observation?subject:mdm=Patient/1
|
||||
```
|
||||
|
||||
This `:mdm` parameter qualifier instructs an interceptor in HAPI FHIR to expand the set of resources included in the search by their MDM-matched resources. The two HTTP requests above will return the same result.
|
||||
|
||||
|
||||
<div class="helpWarningCalloutBox">
|
||||
One important caveat is that chaining is currently not supported when using this qualifier.
|
||||
</div>
|
||||
|
||||
## Enabling MDM Expansion
|
||||
|
||||
On top of needing to instantiate an MDM module, you must enable this feature in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property.
|
||||
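For example, in a Spring configuration class this might look like the following sketch (the surrounding bean wiring is assumed):

```java
@Bean
public DaoConfig daoConfig() {
	DaoConfig daoConfig = new DaoConfig();
	// Opt in to the :mdm search parameter qualifier (disabled by default)
	daoConfig.setAllowMdmExpansion(true);
	return daoConfig;
}
```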
|
||||
<div class="helpWarningCalloutBox">
|
||||
It is important to note that enabling this functionality can lead to incorrect data being returned by a request if your MDM links are incorrect. Use with caution.
|
||||
</div>
|
||||
|
|
@ -292,10 +292,10 @@ The following algorithms are currently supported:
|
|||
<td>Gail = Gael, Gail != Gale, Thomas != Tom</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CAVERPHONE1</td>
|
||||
<td>CAVERPHONE2</td>
|
||||
<td>matcher</td>
|
||||
<td>
|
||||
<a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone1.html">Apache Caverphone1</a>
|
||||
<a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone2.html">Apache Caverphone2</a>
|
||||
</td>
|
||||
<td>Gail = Gael, Gail = Gale, Thomas != Tom</td>
|
||||
</tr>
|
||||
|
@ -379,6 +379,14 @@ The following algorithms are currently supported:
|
|||
</td>
|
||||
<td>2019-12,Month = 2019-12-19,Day</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>NUMERIC</td>
|
||||
<td>matcher</td>
|
||||
<td>
|
||||
Remove all non-numeric characters from the string before comparing.
|
||||
</td>
|
||||
<td>4169671111 = (416) 967-1111</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>NAME_ANY_ORDER</td>
|
||||
<td>matcher</td>
|
||||
|
|
|
@ -134,6 +134,15 @@ This module will invoke the following operations on the remote terminology serve
|
|||
* **POST [base]/CodeSystem/$validate-code** – Validate codes in fields where no specific ValueSet is bound
|
||||
* **POST [base]/ValueSet/$validate-code** – Validate codes in fields where a specific ValueSet is bound
|
||||
|
||||
# UnknownCodeSystemWarningValidationSupport
|
||||
|
||||
[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.html) / [Source](https://github.com/jamesagnew/hapi-fhir/blob/ja_20200218_validation_api_changes/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java)
|
||||
|
||||
This validation support module may be placed at the end of a ValidationSupportChain in order to configure the validator to generate a warning if a resource being validated contains an unknown code system.
|
||||
|
||||
Note that this module must also be activated by calling [setAllowNonExistentCodeSystem(true)](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.html#setAllowNonExistentCodeSystem(boolean)) in order to specify that unknown code systems should be allowed.
|
||||
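Putting this together, a chain might be assembled as in the following sketch (the other modules shown in the chain are illustrative choices, not requirements):

```java
// Support classes are in org.hl7.fhir.common.hapi.validation.support,
// the validator module in org.hl7.fhir.common.hapi.validation.validator
FhirContext ctx = FhirContext.forR4();

UnknownCodeSystemWarningValidationSupport unknownCodeSystemSupport =
	new UnknownCodeSystemWarningValidationSupport(ctx);
// Downgrade "unknown code system" failures from an error to a warning
unknownCodeSystemSupport.setAllowNonExistentCodeSystem(true);

// Place the module at the end of the chain so it is only consulted when
// no earlier module can resolve the code system
ValidationSupportChain chain = new ValidationSupportChain(
	new DefaultProfileValidationSupport(ctx),
	new InMemoryTerminologyServerValidationSupport(ctx),
	unknownCodeSystemSupport);

FhirValidator validator = ctx.newValidator();
validator.registerValidatorModule(new FhirInstanceValidator(chain));
```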
|
||||
|
||||
# Recipes
|
||||
|
||||
The IValidationSupport instance passed to the FhirInstanceValidator will often resemble the chain shown in the diagram below. In this diagram:
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE3-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE6-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE3-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE6-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE3-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE6-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE3-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE6-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -1716,6 +1716,36 @@ public class DaoConfig {
|
|||
this.myModelConfig.setAllowContainsSearches(theAllowContainsSearches);
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1, you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public boolean isAllowMdmExpansion() {
|
||||
return myModelConfig.isAllowMdmExpansion();
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1, you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public void setAllowMdmExpansion(boolean theAllowMdmExpansion) {
|
||||
myModelConfig.setAllowMdmExpansion(theAllowMdmExpansion);
|
||||
}
|
||||
|
||||
/**
|
||||
* This setting may be used to advise the server that any references found in
|
||||
* resources that have any of the base URLs given here will be replaced with
|
||||
|
|
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.api.dao;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
@ -28,17 +29,11 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
|||
|
||||
public interface IFhirResourceDaoValueSet<T extends IBaseResource, CD, CC> extends IFhirResourceDao<T> {
|
||||
|
||||
T expand(IIdType theId, String theFilter, RequestDetails theRequestDetails);
|
||||
T expand(IIdType theId, ValueSetExpansionOptions theOptions, RequestDetails theRequestDetails);
|
||||
|
||||
T expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails);
|
||||
T expand(T theSource, ValueSetExpansionOptions theOptions);
|
||||
|
||||
T expand(T theSource, String theFilter);
|
||||
|
||||
T expand(T theSource, String theFilter, int theOffset, int theCount);
|
||||
|
||||
T expandByIdentifier(String theUri, String theFilter);
|
||||
|
||||
T expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount);
|
||||
T expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions);
|
||||
|
||||
void purgeCaches();
|
||||
|
||||
|
|
|
@ -67,6 +67,18 @@ public interface IFhirSystemDao<T, MT> extends IDao {
|
|||
*/
|
||||
IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);
|
||||
|
||||
/**
|
||||
* Executes a FHIR transaction using a new database transaction. This method must
|
||||
* not be called from within a DB transaction.
|
||||
*/
|
||||
T transaction(RequestDetails theRequestDetails, T theResources);
|
||||
|
||||
/**
|
||||
* Executes a FHIR transaction nested inside the current database transaction.
|
||||
* This form of the transaction processor can handle write operations only (no reads)
|
||||
*/
|
||||
default T transactionNested(RequestDetails theRequestDetails, T theResources) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE3-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE6-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -20,17 +20,20 @@ package ca.uhn.fhir.jpa.batch;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
|
||||
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
||||
@Configuration
|
||||
//When you define a new batch job, add it here.
|
||||
@Import({
|
||||
CommonBatchJobConfig.class,
|
||||
BulkExportJobConfig.class
|
||||
CommonBatchJobConfig.class,
|
||||
BulkExportJobConfig.class,
|
||||
BulkImportJobConfig.class
|
||||
})
|
||||
public class BatchJobsConfig {
|
||||
public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
|
||||
public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
|
||||
public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
|
||||
public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.fhirpath.IFhirPath;
|
||||
import ca.uhn.fhir.jpa.batch.log.Logs;
|
||||
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
|
||||
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.util.ExtensionUtil;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.api;
|
||||
package ca.uhn.fhir.jpa.bulk.export.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.api;
|
||||
package ca.uhn.fhir.jpa.bulk.export.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
|
@ -50,7 +50,7 @@ public interface IBulkDataExportSvc {
|
|||
|
||||
class JobInfo {
|
||||
private String myJobId;
|
||||
private BulkJobStatusEnum myStatus;
|
||||
private BulkExportJobStatusEnum myStatus;
|
||||
private List<FileEntry> myFiles;
|
||||
private String myRequest;
|
||||
private Date myStatusTime;
|
||||
|
@ -90,11 +90,11 @@ public interface IBulkDataExportSvc {
|
|||
|
||||
}
|
||||
|
||||
public BulkJobStatusEnum getStatus() {
|
||||
public BulkExportJobStatusEnum getStatus() {
|
||||
return myStatus;
|
||||
}
|
||||
|
||||
public JobInfo setStatus(BulkJobStatusEnum theStatus) {
|
||||
public JobInfo setStatus(BulkExportJobStatusEnum theStatus) {
|
||||
myStatus = theStatus;
|
||||
return this;
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
|
|||
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
|
|||
myPidIterator = getResourcePidIterator();
|
||||
}
|
||||
|
||||
abstract Iterator<ResourcePersistentId> getResourcePidIterator();
|
||||
protected abstract Iterator<ResourcePersistentId> getResourcePidIterator();
|
||||
|
||||
protected List<SearchParameterMap> createSearchParameterMapsForResourceType() {
|
||||
BulkExportJobEntity jobEntity = getJobEntity();
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import org.springframework.batch.core.BatchStatus;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import org.springframework.batch.core.ExitStatus;
|
||||
import org.springframework.batch.core.StepExecution;
|
||||
import org.springframework.batch.core.StepExecutionListener;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/**
|
||||
* Will run before and after a job to set the status to whatever is appropriate.
|
||||
|
@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener
|
|||
public void beforeStep(StepExecution theStepExecution) {
|
||||
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
|
||||
if (jobUuid != null) {
|
||||
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING);
|
||||
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING);
|
||||
}
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import org.springframework.batch.core.ExitStatus;
|
||||
import org.springframework.batch.core.StepExecution;
|
||||
import org.springframework.batch.core.StepExecutionListener;
|
||||
|
@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
|
|||
}
|
||||
assert isNotBlank(jobUuid);
|
||||
String exitDescription = theStepExecution.getExitStatus().getExitDescription();
|
||||
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription);
|
||||
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription);
|
||||
}
|
||||
return theStepExecution.getExitStatus();
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import org.springframework.batch.core.BatchStatus;
|
||||
import org.springframework.batch.core.StepContribution;
|
||||
import org.springframework.batch.core.scope.context.ChunkContext;
|
||||
|
@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet {
|
|||
@Override
|
||||
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
|
||||
if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) {
|
||||
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE);
|
||||
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE);
|
||||
} else {
|
||||
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR);
|
||||
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR);
|
||||
}
|
||||
return RepeatStatus.FINISHED;
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
|
||||
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope;
|
|||
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
|
||||
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||
import org.springframework.batch.item.ItemProcessor;
|
||||
import org.springframework.batch.item.ItemReader;
|
||||
import org.springframework.batch.item.ItemWriter;
|
||||
import org.springframework.batch.item.support.CompositeItemProcessor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
|
@ -59,6 +57,7 @@ public class BulkExportJobConfig {
|
|||
public static final String GROUP_ID_PARAMETER = "groupId";
|
||||
public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
|
||||
public static final int CHUNK_SIZE = 100;
|
||||
public static final String JOB_DESCRIPTION = "jobDescription";
|
||||
|
||||
@Autowired
|
||||
private StepBuilderFactory myStepBuilderFactory;
|
||||
|
@ -90,9 +89,9 @@ public class BulkExportJobConfig {
|
|||
@Lazy
|
||||
public Job bulkExportJob() {
|
||||
return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
|
||||
.validator(bulkJobParameterValidator())
|
||||
.validator(bulkExportJobParameterValidator())
|
||||
.start(createBulkExportEntityStep())
|
||||
.next(partitionStep())
|
||||
.next(bulkExportPartitionStep())
|
||||
.next(closeJobStep())
|
||||
.build();
|
||||
}
|
||||
|
@ -114,7 +113,7 @@ public class BulkExportJobConfig {
|
|||
public Job groupBulkExportJob() {
|
||||
return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
|
||||
.validator(groupBulkJobParameterValidator())
|
||||
.validator(bulkJobParameterValidator())
|
||||
.validator(bulkExportJobParameterValidator())
|
||||
.start(createBulkExportEntityStep())
|
||||
.next(groupPartitionStep())
|
||||
.next(closeJobStep())
|
||||
|
@ -125,7 +124,7 @@ public class BulkExportJobConfig {
|
|||
@Lazy
|
||||
public Job patientBulkExportJob() {
|
||||
return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
|
||||
.validator(bulkJobParameterValidator())
|
||||
.validator(bulkExportJobParameterValidator())
|
||||
.start(createBulkExportEntityStep())
|
||||
.next(patientPartitionStep())
|
||||
.next(closeJobStep())
|
||||
|
@ -150,8 +149,9 @@ public class BulkExportJobConfig {
|
|||
return new CreateBulkExportEntityTasklet();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public JobParametersValidator bulkJobParameterValidator() {
|
||||
public JobParametersValidator bulkExportJobParameterValidator() {
|
||||
return new BulkExportJobParameterValidator();
|
||||
}
|
||||
|
||||
|
@ -159,7 +159,7 @@ public class BulkExportJobConfig {
|
|||
@Bean
|
||||
public Step groupBulkExportGenerateResourceFilesStep() {
|
||||
return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.reader(groupBulkItemReader())
|
||||
.processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
|
||||
.writer(resourceToFileWriter())
|
||||
|
@ -170,17 +170,18 @@ public class BulkExportJobConfig {
|
|||
@Bean
|
||||
public Step bulkExportGenerateResourceFilesStep() {
|
||||
return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep")
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.reader(bulkItemReader())
|
||||
.processor(myPidToIBaseResourceProcessor)
|
||||
.writer(resourceToFileWriter())
|
||||
.listener(bulkExportGenerateResourceFilesStepListener())
|
||||
.build();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public Step patientBulkExportGenerateResourceFilesStep() {
|
||||
return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.reader(patientBulkItemReader())
|
||||
.processor(myPidToIBaseResourceProcessor)
|
||||
.writer(resourceToFileWriter())
|
||||
|
@ -214,7 +215,7 @@ public class BulkExportJobConfig {
|
|||
}
|
||||
|
||||
@Bean
|
||||
public Step partitionStep() {
|
||||
public Step bulkExportPartitionStep() {
|
||||
return myStepBuilderFactory.get("partitionStep")
|
||||
.partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
|
||||
.step(bulkExportGenerateResourceFilesStep())
|
||||
|
@ -240,7 +241,7 @@ public class BulkExportJobConfig {
|
|||
|
||||
@Bean
|
||||
@StepScope
|
||||
public GroupBulkItemReader groupBulkItemReader(){
|
||||
public GroupBulkItemReader groupBulkItemReader() {
|
||||
return new GroupBulkItemReader();
|
||||
}
|
||||
|
||||
|
@ -252,7 +253,7 @@ public class BulkExportJobConfig {
|
|||
|
||||
@Bean
|
||||
@StepScope
|
||||
public BulkItemReader bulkItemReader(){
|
||||
public BulkItemReader bulkItemReader() {
|
||||
return new BulkItemReader();
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
|
|||
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.batch.core.JobParameters;
|
||||
import org.springframework.batch.core.JobParametersInvalidException;
|
||||
import org.springframework.batch.core.JobParametersValidator;
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.springframework.batch.core.JobParametersBuilder;
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
|||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import org.slf4j.Logger;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader {
|
|||
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
|
||||
|
||||
@Override
|
||||
Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID);
|
||||
Set<ResourcePersistentId> myReadPids = new HashSet<>();
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
|
|||
}
|
||||
}
|
||||
|
||||
private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
|
||||
public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
|
||||
theChunkContext
|
||||
.getStepContext()
|
||||
.getStepExecution()
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
|
|||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.QueryChunker;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
|
@ -36,7 +37,6 @@ import ca.uhn.fhir.model.primitive.IdDt;
|
|||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
|
||||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||
import com.google.common.collect.Multimaps;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
@ -81,7 +81,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
|
||||
|
||||
@Override
|
||||
Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
Set<ResourcePersistentId> myReadPids = new HashSet<>();
|
||||
|
||||
//Short circuit out if we detect we are attempting to extract patients
|
||||
|
@ -119,7 +119,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);
|
||||
|
||||
if (myMdmEnabled) {
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
|
||||
SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd);
|
||||
Long pidOrNull = myIdHelperService.getPidOrNull(group);
|
||||
List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
|
||||
goldenPidSourcePidTuple.forEach(tuple -> {
|
||||
|
@ -179,13 +180,12 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
* @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"])
|
||||
*/
|
||||
private List<String> getMembers() {
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
|
||||
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
|
||||
List<IPrimitiveType> evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class);
|
||||
return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients.
|
||||
* if myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to attempt to also expand it into matched
|
||||
|
@ -195,7 +195,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
*/
|
||||
private Set<String> expandAllPatientPidsFromGroup() {
|
||||
Set<String> expandedIds = new HashSet<>();
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
|
||||
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
|
||||
Long pidOrNull = myIdHelperService.getPidOrNull(group);
|
||||
|
||||
//Attempt to perform MDM Expansion of membership
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException;
|
|||
import org.springframework.batch.core.JobParametersValidator;
|
||||
|
||||
|
||||
import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*;
|
||||
import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*;
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
|
||||
public class GroupIdPresentValidator implements JobParametersValidator {
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
|
|||
}
|
||||
|
||||
@Override
|
||||
Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
|
||||
String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
|
||||
ourLog.error(errorMessage);
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -25,8 +25,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.batch.log.Logs;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.util.BinaryUtil;
|
||||
|
@ -100,7 +101,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
|
|||
IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
|
||||
binary.setContentType(Constants.CT_FHIR_NDJSON);
|
||||
binary.setContent(myOutputStream.toByteArray());
|
||||
DaoMethodOutcome outcome = myBinaryDao.create(binary);
|
||||
DaoMethodOutcome outcome = myBinaryDao.create(binary, new SystemRequestDetails());
|
||||
return outcome.getResource().getIdElement();
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.job;
|
||||
package ca.uhn.fhir.jpa.bulk.export.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.batch.core.partition.support.Partitioner;
|
||||
import org.springframework.batch.item.ExecutionContext;
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.model;
|
||||
package ca.uhn.fhir.jpa.bulk.export.model;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public enum BulkJobStatusEnum {
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
|
||||
@JsonFormat(shape = JsonFormat.Shape.STRING)
|
||||
public enum BulkExportJobStatusEnum {
|
||||
|
||||
/**
|
||||
* Sorting OK!
|
||||
*/
|
||||
|
||||
SUBMITTED,
|
||||
BUILDING,
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.model;
|
||||
package ca.uhn.fhir.jpa.bulk.export.model;
|
||||
|
||||
/*-
|
||||
* #%L
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.provider;
|
||||
package ca.uhn.fhir.jpa.bulk.export.provider;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.rest.annotation.IdParam;
|
||||
import ca.uhn.fhir.rest.annotation.Operation;
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.bulk.svc;
|
||||
package ca.uhn.fhir.jpa.bulk.export.svc;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|

@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@ -43,16 +42,13 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobExecutionContext;

@ -78,9 +74,9 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@ -136,7 +132,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
   Pageable page = PageRequest.of(0, 1);
   Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED);
   Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED);
   if (submittedJobs.isEmpty()) {
      return Optional.empty();
   }

@ -158,7 +154,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
if (submittedJobs.isPresent()) {
   BulkExportJobEntity jobEntity = submittedJobs.get();
   jobEntity.setStatus(BulkJobStatusEnum.ERROR);
   jobEntity.setStatus(BulkExportJobStatusEnum.ERROR);
   jobEntity.setStatusMessage(e.getMessage());
   myBulkExportJobDao.save(jobEntity);
}

@ -208,8 +204,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {

   ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
   getBinaryDao().delete(toId(nextFile.getResourceId()));
   getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null);
   getBinaryDao().delete(toId(nextFile.getResourceId()), new SystemRequestDetails());
   getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
   myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());

}

@ -344,7 +340,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
if (useCache) {
   Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
   Pageable page = PageRequest.of(0, 10);
   Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
   Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR);
   if (!existing.isEmpty()) {
      return toSubmittedJobInfo(existing.iterator().next());
   }

@ -373,7 +369,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

BulkExportJobEntity job = new BulkExportJobEntity();
job.setJobId(UUID.randomUUID().toString());
job.setStatus(BulkJobStatusEnum.SUBMITTED);
job.setStatus(BulkExportJobStatusEnum.SUBMITTED);
job.setSince(since);
job.setCreated(new Date());
job.setRequest(request);

@ -445,7 +441,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
retVal.setStatusMessage(job.getStatusMessage());
retVal.setRequest(job.getRequest());

if (job.getStatus() == BulkJobStatusEnum.COMPLETE) {
if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) {
   for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
      for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
         retVal.addFile()

@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.svc;
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.svc;
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc;
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@ -84,12 +82,12 @@ public class BulkExportDaoSvc {
}

@Transactional
public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) {
public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) {
   setJobToStatus(theJobUUID, theStatus, null);
}

@Transactional
public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) {
public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) {
   Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(theJobUUID);
   if (!oJob.isPresent()) {
      ourLog.error("Job with UUID {} doesn't exist!", theJobUUID);

@ -0,0 +1,93 @@
package ca.uhn.fhir.jpa.bulk.imprt.api;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;

import javax.annotation.Nonnull;
import java.util.List;

public interface IBulkDataImportSvc {

   /**
    * Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to)
    */
   String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles);

   /**
    * Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state
    *
    * @param theJobId The job ID
    * @param theFiles The files to add to the job
    */
   void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles);

   /**
    * Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING}
    * state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
    * state, meaning that it is a candidate to be picked up for processing
    *
    * @param theJobId The job ID
    */
   void markJobAsReadyForActivation(String theJobId);

   /**
    * This method is intended to be called from the job scheduler, and will begin execution on
    * the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
    *
    * @return Returns {@literal true} if a job was activated
    */
   boolean activateNextReadyJob();

   /**
    * Updates the job status for the given job
    */
   void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus);

   /**
    * Updates the job status for the given job
    */
   void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage);

   /**
    * Fetch the job description for a given job ID
    *
    * @param theJobId The job ID
    * @return The job description
    */
   BulkImportJobJson fetchJob(String theJobId);

   /**
    * Fetch a given file by job ID
    *
    * @param theJobId The job ID
    * @param theFileIndex The index of the file within the job
    * @return The file
    */
   BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex);

   /**
    * Delete all input files associated with a particular job
    */
   void deleteJobFiles(String theJobId);
}
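
The interface above is the staging entry point for bulk import. The following caller-side sketch is not part of this changeset: BulkImportJobJson's fields are not shown in this diff, and the two BulkImportJobFileJson setters are assumptions that mirror the getters used by BulkImportFileReader further down.

import java.util.Collections;

public class BulkImportStagingExample {

   // theSvc would normally be injected by Spring
   public String stageNdJsonFile(IBulkDataImportSvc theSvc, String theTenant, String theNdJson) {
      BulkImportJobFileJson file = new BulkImportJobFileJson();
      file.setTenantName(theTenant); // assumed setter, mirroring getTenantName() used by the reader
      file.setContents(theNdJson);   // assumed setter, mirroring getContents()

      BulkImportJobJson job = new BulkImportJobJson(); // payload fields not shown in this diff

      // createNewJob() leaves the job in STAGING; marking it ready makes it
      // eligible for the scheduler to pick up via activateNextReadyJob()
      String jobId = theSvc.createNewJob(job, Collections.singletonList(file));
      theSvc.markJobAsReadyForActivation(jobId);
      return jobId;
   }
}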

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Runs before the first step of a bulk import job to move the job into RUNNING status.
 */
public class ActivateBulkImportEntityStepListener implements StepExecutionListener {

   @Autowired
   private IBulkDataImportSvc myBulkImportDaoSvc;

   @Override
   public void beforeStep(StepExecution theStepExecution) {
      String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
      if (jobUuid != null) {
         myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
      }
   }

   @Override
   public ExitStatus afterStep(StepExecution theStepExecution) {
      return ExitStatus.EXECUTING;
   }
}

@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.util.IoUtil;
import com.google.common.io.LineReader;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.io.StringReader;

@SuppressWarnings("UnstableApiUsage")
public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord> {

   @Autowired
   private IBulkDataImportSvc myBulkDataImportSvc;
   @Autowired
   private FhirContext myFhirContext;
   @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUuid;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
   private int myFileIndex;

   private StringReader myReader;
   private LineReader myLineReader;
   private int myLineIndex;
   private String myTenantName;

   @Override
   public ParsedBulkImportRecord read() throws Exception {

      if (myReader == null) {
         BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex);
         myTenantName = file.getTenantName();
         myReader = new StringReader(file.getContents());
         myLineReader = new LineReader(myReader);
      }

      String nextLine = myLineReader.readLine();
      if (nextLine == null) {
         IoUtil.closeQuietly(myReader);
         return null;
      }

      Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid);

      IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine);
      return new ParsedBulkImportRecord(myTenantName, parsed);
   }
}
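
For reference, a sketch of the kind of input this reader consumes: newline-delimited JSON, one complete FHIR resource serialized per line. The rows below are hypothetical; real contents are fetched through fetchFile() rather than read from disk.

// Two hypothetical NDJSON rows; the reader parses each line independently
// with the context's JSON parser and tags it with the file's tenant name.
String ndJson =
   "{\"resourceType\":\"Patient\",\"id\":\"pat-1\",\"active\":true}\n" +
   "{\"resourceType\":\"Patient\",\"id\":\"pat-2\",\"active\":false}";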

@ -0,0 +1,74 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.List;

public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {

   private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
   @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUuid;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
   private int myFileIndex;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}")
   private JobFileRowProcessingModeEnum myRowProcessingMode;
   @Autowired
   private DaoRegistry myDaoRegistry;

   @SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"})
   @Override
   public void write(List<? extends ParsedBulkImportRecord> theItemLists) throws Exception {
      ourLog.info("Beginning bulk import write {} chunks Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex);

      for (ParsedBulkImportRecord nextItem : theItemLists) {

         SystemRequestDetails requestDetails = new SystemRequestDetails();
         requestDetails.setTenantId(nextItem.getTenantName());

         // Yeah this is a lame switch - We'll add more later I swear
         switch (myRowProcessingMode) {
            default:
            case FHIR_TRANSACTION:
               IFhirSystemDao systemDao = myDaoRegistry.getSystemDao();
               IBaseResource inputBundle = nextItem.getRowContent();
               systemDao.transactionNested(requestDetails, inputBundle);
               break;
         }

      }

   }

}

@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

/**
 * Runs as the final step of a bulk import job: sets the job to its terminal status
 * (COMPLETE, or ERROR if the batch execution failed) and deletes the staged input files.
 */
public class BulkImportJobCloser implements Tasklet {

   @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUUID;

   @Autowired
   private IBulkDataImportSvc myBulkDataImportSvc;

   @Override
   public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
      BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus();
      if (executionStatus == BatchStatus.STARTED) {
         myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE);
         myBulkDataImportSvc.deleteJobFiles(myJobUUID);
      } else {
         myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus);
         myBulkDataImportSvc.deleteJobFiles(myJobUUID);
      }
      return RepeatStatus.FINISHED;
   }
}

@ -0,0 +1,169 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.repeat.CompletionPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;

/**
 * Spring Batch job configuration. Contains all the plumbing needed to run a
 * Bulk Import job: a partitioned step that fans file processing out across a
 * task executor, followed by a closing step that finalizes the job status.
 */
@Configuration
public class BulkImportJobConfig {

   public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval";

   @Autowired
   private StepBuilderFactory myStepBuilderFactory;

   @Autowired
   private JobBuilderFactory myJobBuilderFactory;

   @Autowired
   @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
   private TaskExecutor myTaskExecutor;

   @Bean(name = BULK_IMPORT_JOB_NAME)
   @Lazy
   public Job bulkImportJob() throws Exception {
      return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME)
         .validator(bulkImportJobParameterValidator())
         .start(bulkImportPartitionStep())
         .next(bulkImportCloseJobStep())
         .build();
   }

   @Bean
   public JobParametersValidator bulkImportJobParameterValidator() {
      return new BulkImportJobParameterValidator();
   }

   @Bean
   public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() {
      return new CreateBulkImportEntityTasklet();
   }

   @Bean
   @JobScope
   public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() {
      return new ActivateBulkImportEntityStepListener();
   }

   @Bean
   public Step bulkImportPartitionStep() throws Exception {
      return myStepBuilderFactory.get("bulkImportPartitionStep")
         .partitioner("bulkImportPartitionStep", bulkImportPartitioner())
         .partitionHandler(partitionHandler())
         .listener(activateBulkImportEntityStepListener())
         .gridSize(10)
         .build();
   }

   private PartitionHandler partitionHandler() throws Exception {
      assert myTaskExecutor != null;

      TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler();
      retVal.setStep(bulkImportProcessFilesStep());
      retVal.setTaskExecutor(myTaskExecutor);
      retVal.afterPropertiesSet();
      return retVal;
   }

   @Bean
   public Step bulkImportCloseJobStep() {
      return myStepBuilderFactory.get("bulkImportCloseJobStep")
         .tasklet(bulkImportJobCloser())
         .build();
   }

   @Bean
   @JobScope
   public BulkImportJobCloser bulkImportJobCloser() {
      return new BulkImportJobCloser();
   }

   @Bean
   @JobScope
   public BulkImportPartitioner bulkImportPartitioner() {
      return new BulkImportPartitioner();
   }

   @Bean
   public Step bulkImportProcessFilesStep() {
      CompletionPolicy completionPolicy = completionPolicy();

      return myStepBuilderFactory.get("bulkImportProcessFilesStep")
         .<ParsedBulkImportRecord, ParsedBulkImportRecord>chunk(completionPolicy)
         .reader(bulkImportFileReader())
         .writer(bulkImportFileWriter())
         .listener(bulkImportStepListener())
         .listener(completionPolicy)
         .build();
   }

   @Bean
   @StepScope
   public CompletionPolicy completionPolicy() {
      return new BulkImportProcessStepCompletionPolicy();
   }

   @Bean
   @StepScope
   public ItemWriter<ParsedBulkImportRecord> bulkImportFileWriter() {
      return new BulkImportFileWriter();
   }

   @Bean
   @StepScope
   public BulkImportFileReader bulkImportFileReader() {
      return new BulkImportFileReader();
   }

   @Bean
   @StepScope
   public BulkImportStepListener bulkImportStepListener() {
      return new BulkImportStepListener();
   }

}
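
A hedged sketch of launching the job configured above once a staged job exists. It assumes that IBulkDataImportSvc wires this up internally, that IBatchJobSubmitter (seen in the export service imports earlier in this diff) exposes a runJob(Job, JobParameters) method, and that the job UUID is passed under BulkExportJobConfig.JOB_UUID_PARAMETER as the validator below expects; none of this launch code appears in this changeset.

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;

public class BulkImportLaunchExample {

   @Autowired
   private IBatchJobSubmitter myJobSubmitter; // assumed submitter API

   @Autowired
   @Qualifier(BULK_IMPORT_JOB_NAME)
   private Job myBulkImportJob;

   public void launch(String theJobUuid) throws JobParametersInvalidException {
      // The validator rejects the launch if this UUID has no persisted job entity
      JobParameters params = new JobParametersBuilder()
         .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid)
         .toJobParameters();
      myJobSubmitter.runJob(myBulkImportJob, params);
   }
}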

@ -0,0 +1,70 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.Optional;

/**
 * This class will prevent a job from running if the UUID does not exist or is invalid.
 */
public class BulkImportJobParameterValidator implements JobParametersValidator {

   @Autowired
   private IBulkImportJobDao myBulkImportJobDao;
   @Autowired
   private PlatformTransactionManager myTransactionManager;

   @Override
   public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
      if (theJobParameters == null) {
         throw new JobParametersInvalidException("This job needs Parameters: [jobUUID]");
      }

      TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
      String errorMessage = txTemplate.execute(tx -> {
         StringBuilder errorBuilder = new StringBuilder();
         String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
         Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
         if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
            errorBuilder.append("There is no persisted job that exists with UUID: ");
            errorBuilder.append(jobUUID);
            errorBuilder.append(". ");
         }

         return errorBuilder.toString();
      });

      if (!StringUtils.isEmpty(errorMessage)) {
         throw new JobParametersInvalidException(errorMessage);
      }
   }
}
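
To illustrate the contract, a small sketch using standard Spring Batch APIs; theValidator stands in for a fully wired instance and the UUID value is hypothetical:

// Passes when jobUUID is blank or matches a persisted BulkImportJobEntity;
// throws JobParametersInvalidException when a non-blank UUID has no matching job.
JobParameters params = new JobParametersBuilder()
   .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, "hypothetical-job-uuid")
   .toJobParameters();
theValidator.validate(params); // throws if "hypothetical-job-uuid" is unknown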