Merge branch 'master' into kh_2526_Add_Operation_Section_to_Conformance_Statements_resource_section

commit 2f126a8d9f
@@ -31,6 +31,7 @@ charset = utf-8
 indent_style = tab
 tab_width = 3
 indent_size = 3
+continuation_indent_size=3
 ij_java_align_consecutive_assignments = false
 ij_java_align_consecutive_variable_declarations = false
 ij_java_align_group_field_declarations = false
@@ -4,7 +4,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -22,6 +22,7 @@ package ca.uhn.fhir.context;
 
 import ca.uhn.fhir.model.api.annotation.Child;
 import ca.uhn.fhir.model.api.annotation.Description;
+import ca.uhn.fhir.util.ParametersUtil;
 import ca.uhn.fhir.util.ValidateUtil;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;

@@ -60,7 +61,7 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil
    myElementName = theElementName;
    if (theDescriptionAnnotation != null) {
       myShortDefinition = theDescriptionAnnotation.shortDefinition();
-      myFormalDefinition = theDescriptionAnnotation.formalDefinition();
+      myFormalDefinition = ParametersUtil.extractDescription(theDescriptionAnnotation);
    } else {
       myShortDefinition = null;
       myFormalDefinition = null;
@@ -0,0 +1,38 @@
+package ca.uhn.fhir.context.phonetic;
+
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import com.google.common.base.CharMatcher;
+
+// Useful for numerical identifiers like phone numbers, address parts etc.
+// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like that.
+public class NumericEncoder implements IPhoneticEncoder {
+   @Override
+   public String name() {
+      return "NUMERIC";
+   }
+
+   @Override
+   public String encode(String theString) {
+      // Remove everything but the numbers
+      return CharMatcher.inRange('0', '9').retainFrom(theString);
+   }
+}
@@ -39,7 +39,8 @@ public enum PhoneticEncoderEnum {
    METAPHONE(new ApacheEncoder("METAPHONE", new Metaphone())),
    NYSIIS(new ApacheEncoder("NYSIIS", new Nysiis())),
    REFINED_SOUNDEX(new ApacheEncoder("REFINED_SOUNDEX", new RefinedSoundex())),
-   SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex()));
+   SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex())),
+   NUMERIC(new NumericEncoder());
 
    private final IPhoneticEncoder myPhoneticEncoder;
 
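Together, these two hunks register the new encoder under the NUMERIC enum constant. A minimal usage sketch of the class added above (the phone number is illustrative):

import ca.uhn.fhir.context.phonetic.NumericEncoder;

public class NumericEncoderDemo {
   public static void main(String[] args) {
      NumericEncoder encoder = new NumericEncoder();
      // Everything except the digits 0-9 is stripped
      String encoded = encoder.encode("(416) 967-1111");
      System.out.println(encoded); // prints 4169671111
   }
}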
@@ -30,9 +30,16 @@ import java.lang.annotation.Target;
  * a search parameter definition in order to provide documentation for that item.
  */
 @Retention(RetentionPolicy.RUNTIME)
-@Target(value= {ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER, ElementType.METHOD})
+@Target(value = {ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER, ElementType.METHOD})
 public @interface Description {
 
+   /**
+    * A description of this method or parameter
+    *
+    * @since 5.4.0
+    */
+   String value() default "";
+
    /**
     * Optional short name for this child
     */

@@ -40,7 +47,16 @@ public @interface Description {
 
    /**
     * Optional formal definition for this child
+    *
+    * @deprecated Use {@link #value()} instead. Deprecated in 5.4.0.
     */
+   @Deprecated
    String formalDefinition() default "";
 
+   /**
+    * May be used to supply example values for this
+    *
+    * @since 5.4.0
+    */
+   String[] example() default {};
 }
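A hedged sketch of the reworked annotation in use on a provider method; the search method, parameter, and example value are illustrative, not part of this change:

import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.param.StringParam;
import org.hl7.fhir.r4.model.Patient;

import java.util.Collections;
import java.util.List;

public class PatientProvider {

   // The new value() attribute replaces the deprecated formalDefinition(),
   // and example() supplies sample values for the operation parameter
   @Search
   @Description(value = "Searches for patients by family name", example = {"Simpson"})
   public List<Patient> search(@RequiredParam(name = Patient.SP_FAMILY) StringParam theFamily) {
      return Collections.emptyList();
   }
}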
@@ -78,4 +78,13 @@ public @interface AddTags {
     */
    Class<? extends IBaseResource> type() default IBaseResource.class;
 
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
 }
@@ -47,4 +47,13 @@ public @interface Create {
    Class<? extends IBaseResource> type() default IBaseResource.class;
 
 
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
 }
@@ -48,4 +48,15 @@ public @interface Delete {
     */
    // NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere
    Class<? extends IBaseResource> type() default IBaseResource.class;
+
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
+
 }
@@ -74,4 +74,14 @@ public @interface DeleteTags {
     */
    Class<? extends IBaseResource> type() default IBaseResource.class;
 
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
+
 }
@@ -81,4 +81,14 @@ public @interface History {
     */
    Class<? extends IBaseResource> type() default IBaseResource.class;
 
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
+
 }
@@ -132,7 +132,10 @@ public @interface Operation {
 
    /**
     * If this is set to <code>true</code>, this method will be a <b>global operation</b>
-    * meaning that it applies to all resource types
+    * meaning that it applies to all resource types. Operations with this flag set should be
+    * placed in Plain Providers (i.e. they don't need to be placed in a resource-type-specific
+    * <code>IResourceProvider</code> instance) and should have a parameter annotated with
+    * {@link IdParam}.
     */
    boolean global() default false;
 
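A hedged sketch of what the expanded javadoc describes: a global operation living in a plain provider rather than in an IResourceProvider. The operation name "$touch" and its behaviour are illustrative only:

import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class GlobalOperationProvider {

   // global = true means the operation applies to all resource types, so the
   // incoming @IdParam may identify any resource on the server
   @Operation(name = "$touch", global = true)
   public Parameters touch(@IdParam IIdType theId) {
      Parameters retVal = new Parameters();
      retVal.addParameter().setName("touched").setValue(new StringType(theId.getValue()));
      return retVal;
   }
}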
@@ -39,7 +39,7 @@ public @interface OperationParam {
    /**
     * Value for {@link OperationParam#max()} indicating no maximum
     */
-   final int MAX_UNLIMITED = -1;
+   int MAX_UNLIMITED = -1;
 
 
    /**

@@ -57,7 +57,7 @@ public @interface OperationParam {
     *
     * @since 1.5
     */
-   final int MAX_DEFAULT = -2;
+   int MAX_DEFAULT = -2;
 
    /**
     * The name of the parameter
@@ -50,4 +50,13 @@ public @interface Patch {
    // NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere
    Class<? extends IBaseResource> type() default IBaseResource.class;
 
+   /**
+    * This method allows the return type for this method to be specified in a
+    * non-type-specific way, using the text name of the resource, e.g. "Patient".
+    *
+    * This attribute should be populated, or {@link #type()} should be, but not both.
+    *
+    * @since 5.4.0
+    */
+   String typeName() default "";
 }
|
@ -78,6 +78,16 @@ public @interface Search {
|
|||
// NB: Read, Search (maybe others) share this annotation method, so update the javadocs everywhere
|
||||
Class<? extends IBaseResource> type() default IBaseResource.class;
|
||||
|
||||
/**
|
||||
* This method allows the return type for this method to be specified in a
|
||||
* non-type-specific way, using the text name of the resource, e.g. "Patient".
|
||||
*
|
||||
* This attribute should be populate, or {@link #type()} should be, but not both.
|
||||
*
|
||||
* @since 5.4.0
|
||||
*/
|
||||
String typeName() default "";
|
||||
|
||||
/**
|
||||
* In a REST server, should this method be invoked even if it does not have method parameters
|
||||
* which correspond to all of the URL parameters passed in by the client (default is <code>false</code>).
|
||||
|
@ -91,4 +101,5 @@ public @interface Search {
|
|||
* </p>
|
||||
*/
|
||||
boolean allowUnknownParams() default false;
|
||||
|
||||
}
|
||||
|
|
|
@ -49,4 +49,14 @@ public @interface Update {
|
|||
// NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere
|
||||
Class<? extends IResource> type() default IResource.class;
|
||||
|
||||
/**
|
||||
* This method allows the return type for this method to be specified in a
|
||||
* non-type-specific way, using the text name of the resource, e.g. "Patient".
|
||||
* <p>
|
||||
* This attribute should be populate, or {@link #type()} should be, but not both.
|
||||
*
|
||||
* @since 5.4.0
|
||||
*/
|
||||
String typeName() default "";
|
||||
|
||||
}
|
||||
|
|
|
@ -53,6 +53,16 @@ public @interface Validate {
|
|||
// NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere
|
||||
Class<? extends IBaseResource> type() default IBaseResource.class;
|
||||
|
||||
/**
|
||||
* This method allows the return type for this method to be specified in a
|
||||
* non-type-specific way, using the text name of the resource, e.g. "Patient".
|
||||
*
|
||||
* This attribute should be populate, or {@link #type()} should be, but not both.
|
||||
*
|
||||
* @since 5.4.0
|
||||
*/
|
||||
String typeName() default "";
|
||||
|
||||
/**
|
||||
* Validation mode parameter annotation for the validation mode parameter (only supported
|
||||
* in FHIR DSTU2+). Parameter must be of type {@link ValidationModeEnum}.
|
||||
|
|
|
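The typeName attribute repeated across the annotations above lets a plain provider name its resource type as a string instead of a structures class. A minimal sketch, assuming an R4 server:

import ca.uhn.fhir.rest.annotation.Search;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.Collections;
import java.util.List;

public class PlainPatientProvider {

   // Equivalent to type = Patient.class, but with no compile-time dependency
   // on a specific FHIR structures module
   @Search(typeName = "Patient")
   public List<IBaseResource> searchAllPatients() {
      return Collections.emptyList();
   }
}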
@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.model.primitive.IdDt;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseBackboneElement;

@@ -156,7 +157,8 @@ public class BundleBuilder {
 
       // Bundle.entry.request.url
       IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-      url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue());
+      String resourceType = myContext.getResourceType(theResource);
+      url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
       myEntryRequestUrlChild.getMutator().setValue(request, url);
 
       // Bundle.entry.request.url

@@ -193,6 +195,45 @@ public class BundleBuilder {
       return new CreateBuilder(request);
    }
 
+   /**
+    * Adds an entry containing a delete (DELETE) request.
+    * Also sets the Bundle.type value to "transaction" if it is not already set.
+    *
+    * Note that the resource is only used to extract its ID and type; the body of the resource is not included in the entry.
+    *
+    * @param theResource The resource to delete.
+    */
+   public void addTransactionDeleteEntry(IBaseResource theResource) {
+      String resourceType = myContext.getResourceType(theResource);
+      String idPart = theResource.getIdElement().toUnqualifiedVersionless().getIdPart();
+      addTransactionDeleteEntry(resourceType, idPart);
+   }
+
+   /**
+    * Adds an entry containing a delete (DELETE) request.
+    * Also sets the Bundle.type value to "transaction" if it is not already set.
+    *
+    * @param theResourceType The type of resource to delete.
+    * @param theIdPart The ID of the resource to delete.
+    */
+   public void addTransactionDeleteEntry(String theResourceType, String theIdPart) {
+      setBundleField("type", "transaction");
+      IBase request = addEntryAndReturnRequest();
+      IdDt idDt = new IdDt(theIdPart);
+
+      // Bundle.entry.request.url
+      IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
+      url.setValueAsString(idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue());
+      myEntryRequestUrlChild.getMutator().setValue(request, url);
+
+      // Bundle.entry.request.method
+      IPrimitiveType<?> method = (IPrimitiveType<?>) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments());
+      method.setValueAsString("DELETE");
+      myEntryRequestMethodChild.getMutator().setValue(request, method);
+   }
+
+
    /**
     * Adds an entry for a Collection bundle type
     */

@@ -250,6 +291,16 @@ public class BundleBuilder {
       return request;
    }
 
+   public IBase addEntryAndReturnRequest() {
+      IBase entry = addEntry();
+
+      // Bundle.entry.request
+      IBase request = myEntryRequestDef.newInstance();
+      myEntryRequestChild.getMutator().setValue(entry, request);
+      return request;
+
+   }
+
 
    public IBaseBundle getBundle() {
      return myBundle;
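A minimal sketch of the delete-entry API added above; the resource type and ID are illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;

public class DeleteEntryDemo {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      BundleBuilder builder = new BundleBuilder(ctx);
      // Produces an entry with request.method = DELETE and request.url = Patient/123,
      // and sets Bundle.type to "transaction" if not already set
      builder.addTransactionDeleteEntry("Patient", "123");
      IBaseBundle transaction = builder.getBundle();
   }
}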
@@ -36,6 +36,13 @@ import java.util.stream.Collectors;
  */
 public class ExtensionUtil {
 
+   /**
+    * Non instantiable
+    */
+   private ExtensionUtil() {
+      // nothing
+   }
+
    /**
     * Returns an extension with the specified URL creating one if it doesn't exist.
     *

@@ -46,7 +53,7 @@ public class ExtensionUtil {
     */
    public static IBaseExtension<?, ?> getOrCreateExtension(IBase theBase, String theUrl) {
       IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase);
-      IBaseExtension extension = getExtensionByUrl(baseHasExtensions, theUrl);
+      IBaseExtension<?,?> extension = getExtensionByUrl(baseHasExtensions, theUrl);
       if (extension == null) {
          extension = baseHasExtensions.addExtension();
          extension.setUrl(theUrl);

@@ -75,13 +82,27 @@ public class ExtensionUtil {
     */
    public static IBaseExtension<?, ?> addExtension(IBase theBase, String theUrl) {
       IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase);
-      IBaseExtension extension = baseHasExtensions.addExtension();
+      IBaseExtension<?,?> extension = baseHasExtensions.addExtension();
       if (theUrl != null) {
          extension.setUrl(theUrl);
       }
       return extension;
    }
 
+   /**
+    * Adds an extension with the specified value
+    *
+    * @param theBase The resource to update extension on
+    * @param theUrl Extension URL
+    * @param theValueType Type of the value to set in the extension
+    * @param theValue Extension value
+    * @param theFhirContext The context containing FHIR resource definitions
+    */
+   public static void addExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) {
+      IBaseExtension<?,?> ext = addExtension(theBase, theUrl);
+      setExtension(theFhirContext, ext, theValueType, theValue);
+   }
+
    private static IBaseHasExtensions validateExtensionSupport(IBase theBase) {
       if (!(theBase instanceof IBaseHasExtensions)) {
          throw new IllegalArgumentException(String.format("Expected instance that supports extensions, but got %s", theBase));

@@ -118,7 +139,7 @@ public class ExtensionUtil {
       if (!hasExtension(theBase, theExtensionUrl)) {
          return false;
       }
-      IBaseDatatype value = getExtensionByUrl((IBaseHasExtensions) theBase, theExtensionUrl).getValue();
+      IBaseDatatype value = getExtensionByUrl(theBase, theExtensionUrl).getValue();
       if (value == null) {
          return theExtensionValue == null;
       }

@@ -133,7 +154,7 @@ public class ExtensionUtil {
     * @return Returns the first available extension with the specified URL, or null if such extension doesn't exist
     */
    public static IBaseExtension<?, ?> getExtensionByUrl(IBase theBase, String theExtensionUrl) {
-      Predicate<IBaseExtension> filter;
+      Predicate<IBaseExtension<?,?>> filter;
       if (theExtensionUrl == null) {
          filter = (e -> true);
       } else {

@@ -153,7 +174,7 @@ public class ExtensionUtil {
     * @param theFilter Predicate to match the extension against
     * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist
     */
-   public static List<IBaseExtension<?, ?>> getExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension> theFilter) {
+   public static List<IBaseExtension<?, ?>> getExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension<?,?>> theFilter) {
       return validateExtensionSupport(theBase)
          .getExtension()
         .stream()

@@ -189,7 +210,7 @@ public class ExtensionUtil {
     * @param theFilter Defines which extensions should be cleared
     * @return Returns all extension that were removed
     */
-   private static List<IBaseExtension<?, ?>> clearExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension> theFilter) {
+   private static List<IBaseExtension<?, ?>> clearExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension<?,?>> theFilter) {
       List<IBaseExtension<?, ?>> retVal = getExtensionsMatchingPredicate(theBase, theFilter);
       validateExtensionSupport(theBase)
          .getExtension()

@@ -205,7 +226,7 @@ public class ExtensionUtil {
     * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist
     */
    public static List<IBaseExtension<?, ?>> getExtensionsByUrl(IBaseHasExtensions theBase, String theExtensionUrl) {
-      Predicate<IBaseExtension> urlEqualityPredicate = e -> theExtensionUrl.equals(e.getUrl());
+      Predicate<IBaseExtension<?,?>> urlEqualityPredicate = e -> theExtensionUrl.equals(e.getUrl());
       return getExtensionsMatchingPredicate(theBase, urlEqualityPredicate);
    }
 

@@ -216,8 +237,8 @@ public class ExtensionUtil {
    * @param theValue The value to set
    * @param theFhirContext The context containing FHIR resource definitions
    */
-   public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theValue) {
-      setExtension(theFhirContext, theExtension, "string", (Object) theValue);
+   public static void setExtension(FhirContext theFhirContext, IBaseExtension<?,?> theExtension, String theValue) {
+      setExtension(theFhirContext, theExtension, "string", theValue);
    }
 
    /**

@@ -228,7 +249,7 @@ public class ExtensionUtil {
    * @param theValue The value to set
    * @param theFhirContext The context containing FHIR resource definitions
    */
-   public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theExtensionType, Object theValue) {
+   public static void setExtension(FhirContext theFhirContext, IBaseExtension<?,?> theExtension, String theExtensionType, Object theValue) {
       theExtension.setValue(TerserUtil.newElement(theFhirContext, theExtensionType, theValue));
    }
 

@@ -241,7 +262,7 @@ public class ExtensionUtil {
    * @param theFhirContext The context containing FHIR resource definitions
    */
    public static void setExtensionAsString(FhirContext theFhirContext, IBase theBase, String theUrl, String theValue) {
-      IBaseExtension ext = getOrCreateExtension(theBase, theUrl);
+      IBaseExtension<?,?> ext = getOrCreateExtension(theBase, theUrl);
       setExtension(theFhirContext, ext, theValue);
    }
 

@@ -255,7 +276,7 @@ public class ExtensionUtil {
    * @param theFhirContext The context containing FHIR resource definitions
    */
    public static void setExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) {
-      IBaseExtension ext = getOrCreateExtension(theBase, theUrl);
+      IBaseExtension<?,?> ext = getOrCreateExtension(theBase, theUrl);
       setExtension(theFhirContext, ext, theValueType, theValue);
    }
 

@@ -266,7 +287,7 @@ public class ExtensionUtil {
    * @param theRightExtension : Extension to be evaluated #2
    * @return Result of the comparison
    */
-   public static boolean equals(IBaseExtension theLeftExtension, IBaseExtension theRightExtension) {
+   public static boolean equals(IBaseExtension<?,?> theLeftExtension, IBaseExtension<?,?> theRightExtension) {
       return TerserUtil.equals(theLeftExtension, theRightExtension);
    }
 }
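A small sketch of the utility in use after these changes; the extension URL is illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ExtensionUtil;
import org.hl7.fhir.r4.model.Patient;

public class ExtensionUtilDemo {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      Patient patient = new Patient();
      // Creates the extension if it is absent, then sets a string value on it
      ExtensionUtil.setExtensionAsString(ctx, patient, "http://example.org/StructureDefinition/eye-colour", "blue");
   }
}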
@@ -116,7 +116,13 @@ public class HapiExtensions {
     */
    public static final String ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL = "https://hapifhir.org/associated-patient-golden-resource/";
 
+   /**
+    * This extension provides an example value for a parameter value for
+    * a REST operation (eg for an OperationDefinition)
+    */
+   public static final String EXT_OP_PARAMETER_EXAMPLE_VALUE = "http://hapifhir.io/fhir/StructureDefinition/op-parameter-example-value";
+
    /**
     * Non instantiable
     */
    private HapiExtensions() {
@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.model.api.annotation.Description;
 import ca.uhn.fhir.model.primitive.StringDt;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;

@@ -34,8 +35,13 @@ import org.hl7.fhir.instance.model.api.IBaseReference;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 
+import javax.annotation.Nullable;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.AnnotatedElement;
 import java.lang.reflect.Method;
 import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Optional;

@@ -43,6 +49,7 @@ import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 
 /**
  * Utilities for dealing with parameters resources in a version independent way

@@ -418,4 +425,60 @@ public class ParametersUtil {
          .findFirst();
    }
 
+   @Nullable
+   public static String extractDescription(AnnotatedElement theType) {
+      Description description = theType.getAnnotation(Description.class);
+      if (description != null) {
+         return extractDescription(description);
+      } else {
+         return null;
+      }
+   }
+
+   @Nullable
+   public static String extractDescription(Description desc) {
+      String description = desc.value();
+      if (isBlank(description)) {
+         description = desc.formalDefinition();
+      }
+      if (isBlank(description)) {
+         description = desc.shortDefinition();
+      }
+      return defaultIfBlank(description, null);
+   }
+
+   @Nullable
+   public static String extractShortDefinition(AnnotatedElement theType) {
+      Description description = theType.getAnnotation(Description.class);
+      if (description != null) {
+         return defaultIfBlank(description.shortDefinition(), null);
+      } else {
+         return null;
+      }
+   }
+
+   public static String extractDescription(Annotation[] theParameterAnnotations) {
+      for (Annotation next : theParameterAnnotations) {
+         if (next instanceof Description) {
+            return extractDescription((Description) next);
+         }
+      }
+      return null;
+   }
+
+   public static List<String> extractExamples(Annotation[] theParameterAnnotations) {
+      ArrayList<String> retVal = null;
+      for (Annotation next : theParameterAnnotations) {
+         if (next instanceof Description) {
+            String[] examples = ((Description) next).example();
+            if (examples.length > 0) {
+               if (retVal == null) {
+                  retVal = new ArrayList<>();
+               }
+               retVal.addAll(Arrays.asList(examples));
+            }
+         }
+      }
+      return retVal;
+   }
 }
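A runnable sketch of the precedence implemented by the new extractDescription overloads: value() wins, then the deprecated formalDefinition(), then shortDefinition(). The annotated field here is a stand-in for any annotated element:

import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.util.ParametersUtil;

public class ExtractDescriptionDemo {

   @Description(shortDefinition = "short text", formalDefinition = "formal text")
   private String myField;

   public static void main(String[] args) throws NoSuchFieldException {
      // value() is blank here, so the deprecated formalDefinition() is used
      String desc = ParametersUtil.extractDescription(
         ExtractDescriptionDemo.class.getDeclaredField("myField"));
      System.out.println(desc); // prints "formal text"
   }
}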
@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import org.apache.commons.lang3.tuple.Triple;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.slf4j.Logger;

@@ -48,9 +49,15 @@ public final class TerserUtil {
 
    private static final String EQUALS_DEEP = "equalsDeep";
 
+   /**
+    * Excludes the id, identifier and meta fields of a resource.
+    */
    public static final Collection<String> IDS_AND_META_EXCLUDES =
       Collections.unmodifiableSet(Stream.of("id", "identifier", "meta").collect(Collectors.toSet()));
 
+   /**
+    * Exclusion predicate for id, identifier, meta fields.
+    */
    public static final Predicate<String> EXCLUDE_IDS_AND_META = new Predicate<String>() {
       @Override
       public boolean test(String s) {

@@ -58,6 +65,25 @@ public final class TerserUtil {
       }
    };
 
+   /**
+    * Exclusion predicate for id/identifier, meta and fields with empty values. This ensures that, when copying
+    * between source and target resources, empty source fields will not result in erasure of target fields.
+    */
+   public static final Predicate<Triple<BaseRuntimeChildDefinition, IBase, IBase>> EXCLUDE_IDS_META_AND_EMPTY = new Predicate<Triple<BaseRuntimeChildDefinition, IBase, IBase>>() {
+      @Override
+      public boolean test(Triple<BaseRuntimeChildDefinition, IBase, IBase> theTriple) {
+         if (!EXCLUDE_IDS_AND_META.test(theTriple.getLeft().getElementName())) {
+            return false;
+         }
+         BaseRuntimeChildDefinition childDefinition = theTriple.getLeft();
+         boolean isSourceFieldEmpty = childDefinition.getAccessor().getValues(theTriple.getMiddle()).isEmpty();
+         return !isSourceFieldEmpty;
+      }
+   };
+
    /**
     * Exclusion predicate for keeping all fields.
     */
    public static final Predicate<String> INCLUDE_ALL = new Predicate<String>() {
       @Override
       public boolean test(String s) {

@@ -235,24 +261,35 @@ public final class TerserUtil {
    }
 
    /**
-    * Replaces all fields that test positive by the given inclusion strategy. <code>theTo</code> will contain a copy of the
+    * Replaces all fields that have matching field names by the given inclusion strategy. <code>theTo</code> will contain a copy of the
     * values from <code>theFrom</code> instance.
     *
-    * @param theFhirContext Context holding resource definition
-    * @param theFrom The resource to merge the fields from
-    * @param theTo The resource to merge the fields into
-    * @param inclusionStrategy Inclusion strategy that checks if a given field should be replaced by checking {@link Predicate#test(Object)}
+    * @param theFhirContext       Context holding resource definition
+    * @param theFrom              The resource to merge the fields from
+    * @param theTo                The resource to merge the fields into
+    * @param theFieldNameInclusion Inclusion strategy that checks if a given field should be replaced
     */
-   public static void replaceFields(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate<String> inclusionStrategy) {
-      FhirTerser terser = theFhirContext.newTerser();
+   public static void replaceFields(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate<String> theFieldNameInclusion) {
+      Predicate<Triple<BaseRuntimeChildDefinition, IBase, IBase>> predicate
+         = (t) -> theFieldNameInclusion.test(t.getLeft().getElementName());
+      replaceFieldsByPredicate(theFhirContext, theFrom, theTo, predicate);
+   }
+
+   /**
+    * Replaces fields on theTo resource that test positive by the given predicate. <code>theTo</code> will contain a copy of the
+    * values from <code>theFrom</code> for which predicate tests positive. Please note that composite fields will be replaced fully.
+    *
+    * @param theFhirContext Context holding resource definition
+    * @param theFrom        The resource to merge the fields from
+    * @param theTo          The resource to merge the fields into
+    * @param thePredicate   Predicate that checks if a given field should be replaced
+    */
+   public static void replaceFieldsByPredicate(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate<Triple<BaseRuntimeChildDefinition, IBase, IBase>> thePredicate) {
       RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom);
       for (BaseRuntimeChildDefinition childDefinition : definition.getChildrenAndExtension()) {
-         if (!inclusionStrategy.test(childDefinition.getElementName())) {
-            continue;
+         if (thePredicate.test(Triple.of(childDefinition, theFrom, theTo))) {
+            replaceField(theFrom, theTo, childDefinition);
          }
-
-         replaceField(theFrom, theTo, childDefinition);
       }
    }

@@ -277,14 +314,11 @@ public final class TerserUtil {
    * @param theTo The resource to replace the field on
    */
    public static void replaceField(FhirContext theFhirContext, String theFieldName, IBaseResource theFrom, IBaseResource theTo) {
-      replaceField(theFhirContext, theFhirContext.newTerser(), theFieldName, theFrom, theTo);
-   }
-
-   /**
-    * @deprecated Use {@link #replaceField(FhirContext, String, IBaseResource, IBaseResource)} instead
-    */
-   public static void replaceField(FhirContext theFhirContext, FhirTerser theTerser, String theFieldName, IBaseResource theFrom, IBaseResource theTo) {
-      replaceField(theFrom, theTo, getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theFrom));
+      RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom);
+      if (definition == null) {
+         throw new IllegalArgumentException(String.format("Field %s does not exist in %s", theFieldName, theFrom));
+      }
+      replaceField(theFrom, theTo, theFhirContext.getResourceDefinition(theFrom).getChildByName(theFieldName));
    }
 
    /**

@@ -301,7 +335,7 @@ public final class TerserUtil {
 
    /**
     * Sets the provided field with the given values. This method will add to the collection of existing field values
-    * in case of multiple cardinality. Use {@link #clearField(FhirContext, FhirTerser, String, IBaseResource, IBase...)}
+    * in case of multiple cardinality. Use {@link #clearField(FhirContext, String, IBaseResource)}
     * to remove values before setting
     *
     * @param theFhirContext Context holding resource definition

@@ -315,7 +349,7 @@ public final class TerserUtil {
 
    /**
     * Sets the provided field with the given values. This method will add to the collection of existing field values
-    * in case of multiple cardinality. Use {@link #clearField(FhirContext, FhirTerser, String, IBaseResource, IBase...)}
+    * in case of multiple cardinality. Use {@link #clearField(FhirContext, String, IBaseResource)}
    * to remove values before setting
    *
    * @param theFhirContext Context holding resource definition

@@ -370,10 +404,26 @@ public final class TerserUtil {
       setFieldByFhirPath(theFhirContext.newTerser(), theFhirPath, theResource, theValue);
    }
 
+   /**
+    * Returns field values at the specified FHIR path from the resource.
+    *
+    * @param theFhirContext Context holding resource definition
+    * @param theFhirPath    The FHIR path to get the field from
+    * @param theResource    The resource from which the value should be retrieved
+    * @return Returns the list of field values at the given FHIR path
+    */
+   public static List<IBase> getFieldByFhirPath(FhirContext theFhirContext, String theFhirPath, IBase theResource) {
+      return theFhirContext.newTerser().getValues(theResource, theFhirPath, false, false);
+   }
+
+   /**
+    * Returns the first available field value at the specified FHIR path from the resource.
+    *
+    * @param theFhirContext Context holding resource definition
+    * @param theFhirPath    The FHIR path to get the field from
+    * @param theResource    The resource from which the value should be retrieved
+    * @return Returns the first available value or null if no values can be retrieved
+    */
    public static IBase getFirstFieldByFhirPath(FhirContext theFhirContext, String theFhirPath, IBase theResource) {
       List<IBase> values = getFieldByFhirPath(theFhirContext, theFhirPath, theResource);
       if (values == null || values.isEmpty()) {
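A brief sketch of the new FHIR-path getters; the path and name are illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.TerserUtil;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.r4.model.Patient;

public class FhirPathGetterDemo {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      Patient patient = new Patient();
      patient.addName().setFamily("Simpson");
      // Returns the first value at the given path, or null if there is none
      IBase family = TerserUtil.getFirstFieldByFhirPath(ctx, "name.family", patient);
      System.out.println(family);
   }
}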
@@ -68,7 +68,11 @@ public enum VersionEnum {
    V5_2_0,
    V5_2_1,
    V5_3_0,
-   V5_4_0;
+   V5_3_1,
+   V5_3_2,
+   V5_3_3,
+   V5_4_0,
+   ;
 
    public static VersionEnum latestVersion() {
       VersionEnum[] values = VersionEnum.values();
@@ -51,6 +51,7 @@ public class SchemaBaseValidator implements IValidatorModule {
 
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaBaseValidator.class);
    private static final Set<String> SCHEMA_NAMES;
+   private static boolean ourJaxp15Supported;
 
    static {
       HashSet<String> sn = new HashSet<>();

@@ -132,7 +133,9 @@ public class SchemaBaseValidator implements IValidatorModule {
             * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
             */
            schemaFactory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
+           ourJaxp15Supported = true;
         } catch (SAXNotRecognizedException e) {
+           ourJaxp15Supported = false;
            ourLog.warn("Jaxp 1.5 Support not found.", e);
         }
         schema = schemaFactory.newSchema(new Source[]{baseSource});

@@ -216,4 +219,8 @@ public class SchemaBaseValidator implements IValidatorModule {
 
    }
 
+   public static boolean isJaxp15Supported() {
+      return ourJaxp15Supported;
+   }
+
 }
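A hedged sketch of how calling code might consult the new accessor; note the flag is only populated once the validator has attempted to configure a schema factory:

import ca.uhn.fhir.validation.SchemaBaseValidator;

public class Jaxp15Check {
   public static void main(String[] args) {
      if (!SchemaBaseValidator.isJaxp15Supported()) {
         System.out.println("JAXP 1.5 secure-processing properties are not supported by this parser");
      }
   }
}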
@@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri
 
 # JPA Messages
 
-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.
@@ -1,14 +1,14 @@
 package ca.uhn.fhir.context.phonetic;
 
-import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.EnumSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.hamcrest.Matchers.startsWith;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.startsWith;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 class PhoneticEncoderTest {
    private static final Logger ourLog = LoggerFactory.getLogger(PhoneticEncoderTest.class);

@@ -23,7 +23,11 @@ class PhoneticEncoderTest {
    public void testEncodeAddress(PhoneticEncoderEnum thePhoneticEncoderEnum) {
       String encoded = thePhoneticEncoderEnum.getPhoneticEncoder().encode(ADDRESS_LINE);
       ourLog.info("{}: {}", thePhoneticEncoderEnum.name(), encoded);
-      assertThat(encoded, startsWith(NUMBER + " "));
-      assertThat(encoded, endsWith(" " + SUITE));
+      if (thePhoneticEncoderEnum == PhoneticEncoderEnum.NUMERIC) {
+         assertEquals(NUMBER + SUITE, encoded);
+      } else {
+         assertThat(encoded, startsWith(NUMBER + " "));
+         assertThat(encoded, endsWith(" " + SUITE));
+      }
    }
 }
@@ -11,5 +11,4 @@ public class PlaceholderTest {
    public void testPass() {
       // nothing
    }
-
 }
@@ -3,14 +3,14 @@
    <modelVersion>4.0.0</modelVersion>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-bom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE8-SNAPSHOT</version>
    <packaging>pom</packaging>
    <name>HAPI FHIR BOM</name>
 
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -41,6 +41,11 @@
          <artifactId>hapi-fhir-server-mdm</artifactId>
          <version>${project.version}</version>
       </dependency>
+      <dependency>
+         <groupId>${project.groupId}</groupId>
+         <artifactId>hapi-fhir-server-openapi</artifactId>
+         <version>${project.version}</version>
+      </dependency>
       <dependency>
          <groupId>${project.groupId}</groupId>
         <artifactId>hapi-fhir-validation</artifactId>
@@ -4,7 +4,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -31,6 +31,7 @@ import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;

@@ -70,6 +71,9 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
    protected static final String VERBOSE_LOGGING_PARAM = "l";
    protected static final String VERBOSE_LOGGING_PARAM_LONGOPT = "logging";
    protected static final String VERBOSE_LOGGING_PARAM_DESC = "If specified, verbose logging will be used.";
+   protected static final int DEFAULT_THREAD_COUNT = 10;
+   protected static final String THREAD_COUNT = "thread-count";
+
    // TODO: Don't use qualified names for loggers in HAPI CLI.
    private static final Logger ourLog = LoggerFactory.getLogger(BaseCommand.class);
    protected FhirContext myFhirCtx;

@@ -87,6 +91,11 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
       addOptionalOption(theOptions, null, BEARER_TOKEN_PARAM_LONGOPT, BEARER_TOKEN_PARAM_NAME, BEARER_TOKEN_PARAM_DESC);
    }
 
+   protected void addThreadCountOption(Options theOptions) {
+      addOptionalOption(theOptions, null, THREAD_COUNT, "count", "If specified, this argument specifies the number of worker threads used (default is " + DEFAULT_THREAD_COUNT + ")");
+   }
+
+
    protected String promptUser(String thePrompt) throws ParseException {
       System.out.print(ansi().bold().fgBrightDefault());
       System.out.print(thePrompt);

@@ -309,6 +318,12 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
       return getFhirContext().getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class);
    }
 
+   protected int getThreadCount(CommandLine theCommandLine) throws ParseException {
+      Integer parallelismThreadCount = getAndParsePositiveIntegerParam(theCommandLine, THREAD_COUNT);
+      parallelismThreadCount = ObjectUtils.defaultIfNull(parallelismThreadCount, DEFAULT_THREAD_COUNT);
+      return parallelismThreadCount.intValue();
+   }
+
    public abstract String getCommandDescription();
 
    public abstract String getCommandName();
@@ -6,7 +6,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir-cli</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../../hapi-deployable-pom</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../pom.xml</relativePath>
    </parent>

@@ -4,7 +4,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -4,7 +4,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation;
 import ca.uhn.fhir.rest.param.ParameterUtil;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.util.FhirTerser;
+import ca.uhn.fhir.util.ParametersUtil;
 import org.hl7.fhir.instance.model.api.*;
 
 import java.lang.reflect.Method;

@@ -64,10 +65,7 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding {
 
      Description description = theMethod.getAnnotation(Description.class);
      if (description != null) {
-        myDescription = description.formalDefinition();
-        if (isBlank(myDescription)) {
-           myDescription = description.shortDefinition();
-        }
+        myDescription = ParametersUtil.extractDescription(description);
      }
      if (isBlank(myDescription)) {
         myDescription = null;
@@ -33,6 +33,7 @@ import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation;
 import ca.uhn.fhir.rest.param.ParameterUtil;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.ParametersUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;

@@ -59,15 +60,7 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding {
      this.myQueryName = StringUtils.defaultIfBlank(search.queryName(), null);
      this.myCompartmentName = StringUtils.defaultIfBlank(search.compartmentName(), null);
      this.myIdParamIndex = ParameterUtil.findIdParameterIndex(theMethod, getContext());
-
-     Description desc = theMethod.getAnnotation(Description.class);
-     if (desc != null) {
-        if (isNotBlank(desc.formalDefinition())) {
-           myDescription = StringUtils.defaultIfBlank(desc.formalDefinition(), null);
-        } else {
-           myDescription = StringUtils.defaultIfBlank(desc.shortDefinition(), null);
-        }
-     }
+     this.myDescription = ParametersUtil.extractDescription(theMethod);
 
      /*
       * Check for parameter combinations and names that are invalid
@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-deployable-pom</artifactId>
-      <version>5.4.0-PRE5-SNAPSHOT</version>
+      <version>5.4.0-PRE8-SNAPSHOT</version>
       <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -55,6 +55,11 @@
          <artifactId>hapi-fhir-jpaserver-base</artifactId>
          <version>${project.version}</version>
       </dependency>
+      <dependency>
+         <groupId>ca.uhn.hapi.fhir</groupId>
+         <artifactId>hapi-fhir-server-openapi</artifactId>
+         <version>${project.version}</version>
+      </dependency>
 
       <dependency>
          <groupId>com.fasterxml.jackson.dataformat</groupId>

@@ -78,13 +83,13 @@
       <dependency>
          <groupId>ca.uhn.hapi.fhir</groupId>
          <artifactId>hapi-fhir-structures-dstu2</artifactId>
-         <version>5.4.0-PRE5-SNAPSHOT</version>
+         <version>5.4.0-PRE8-SNAPSHOT</version>
          <scope>compile</scope>
       </dependency>
       <dependency>
          <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir-jpaserver-subscription</artifactId>
-        <version>5.4.0-PRE5-SNAPSHOT</version>
+        <version>5.4.0-PRE8-SNAPSHOT</version>
         <scope>compile</scope>
      </dependency>
      <dependency>

@@ -101,7 +106,7 @@
      <dependency>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir-testpage-overlay</artifactId>
-        <version>5.4.0-PRE5-SNAPSHOT</version>
+        <version>5.4.0-PRE8-SNAPSHOT</version>
         <classifier>classes</classifier>
      </dependency>
      <dependency>

@@ -118,7 +123,7 @@
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
   </dependency>
-      </dependencies>
+   </dependencies>
 
   <build>
      <plugins>
@@ -0,0 +1,73 @@
+package ca.uhn.hapi.fhir.docs;
+
+/*-
+ * #%L
+ * HAPI FHIR - Docs
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.rest.client.api.IGenericClient;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Composition;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class CreateCompositionAndGenerateDocument {
+
+   private static final Logger ourLog = LoggerFactory.getLogger(CreateCompositionAndGenerateDocument.class);
+
+   public static void main(String[] args) {
+
+      // START SNIPPET: CreateCompositionAndGenerateDocument
+      FhirContext ctx = FhirContext.forR4();
+      IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4");
+
+      Patient patient = new Patient();
+      patient.setId("PATIENT-ABC");
+      patient.setActive(true);
+      client.update().resource(patient).execute();
+
+      Observation observation = new Observation();
+      observation.setId("OBSERVATION-ABC");
+      observation.setSubject(new Reference("Patient/PATIENT-ABC"));
+      observation.setStatus(Observation.ObservationStatus.FINAL);
+      client.update().resource(observation).execute();
+
+      Composition composition = new Composition();
+      composition.setId("COMPOSITION-ABC");
+      composition.setSubject(new Reference("Patient/PATIENT-ABC"));
+      composition.addSection().setFocus(new Reference("Observation/OBSERVATION-ABC"));
+      client.update().resource(composition).execute();
+
+      Bundle document = client
+         .operation()
+         .onInstance("Composition/COMPOSITION-ABC")
+         .named("$document")
+         .withNoParameters(Parameters.class)
+         .returnResourceType(Bundle.class)
+         .execute();
+
+      ourLog.info("Document bundle: {}", ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(document));
+      // END SNIPPET: CreateCompositionAndGenerateDocument
+
+   }
+}
@@ -23,6 +23,7 @@ package ca.uhn.hapi.fhir.docs;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.rest.api.PreferHandlingEnum;
+import ca.uhn.fhir.rest.openapi.OpenApiInterceptor;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.interceptor.*;

@@ -65,6 +66,24 @@ public class ServletExamples {
    }
    // END SNIPPET: loggingInterceptor
 
+   // START SNIPPET: OpenApiInterceptor
+   @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server")
+   public class RestfulServerWithOpenApi extends RestfulServer {
+
+      @Override
+      protected void initialize() throws ServletException {
+
+         // ... define your resource providers here ...
+
+         // Now register the interceptor
+         OpenApiInterceptor openApiInterceptor = new OpenApiInterceptor();
+         registerInterceptor(openApiInterceptor);
+
+      }
+
+   }
+   // END SNIPPET: OpenApiInterceptor
+
    // START SNIPPET: validatingInterceptor
    @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server")
    public class ValidatingServerWithLogging extends RestfulServer {
@ -0,0 +1,3 @@
|
|||
---
|
||||
release-date: "2021-03-11"
|
||||
codename: "Odyssey"
|
|
@ -0,0 +1,3 @@
|
|||
---
|
||||
release-date: "2021-04-14"
|
||||
codename: "Odyssey"
|
|
@ -0,0 +1,3 @@
|
|||
---
|
||||
release-date: "2021-04-26"
|
||||
codename: "Odyssey"
|
|
@@ -4,3 +4,4 @@ issue: 2407
title: "When using the JPA server in partitioned mode with a partition interceptor, the interceptor is now called even for
  resource types that cannot be placed in a non-default partition (e.g. SearchParameter, CodeSystem, etc.). The interceptor
  may return null or default in this case, but can include a non-null partition date if needed."
backport: 5.3.1

@@ -3,3 +3,4 @@ type: fix
issue: 2417
title: "A NullPointerException was corrected when indexing resources containing an indexed Period field that
  had a start but not an end defined."
backport: 5.3.3

@@ -0,0 +1,4 @@
---
type: fix
issue: 2515
title: "Fixed issues with application of survivorship rules when matching a golden record to a single resource"

@@ -0,0 +1,7 @@
---
type: fix
issue: 2533
title: "When issuing a request for a specific Resource and also specifying an _include param,
  the referenced resource is not returned when there is only one version of the referenced resource available.
  When more than one version is available, the referenced resource is returned in the response bundle."
backport: 5.3.2

@@ -0,0 +1,6 @@
---
type: fix
issue: 2535
title: "An issue with the package installer involving logical StructureDefinition resources was fixed. The package registry will no
  longer attempt to generate a snapshot for logical StructureDefinition resources if one is not already provided in the
  resource definition."

@@ -0,0 +1,9 @@
---
type: fix
issue: 2543
title: "When issuing a request for a specific Resource and also specifying an _include param,
  the proper historical referenced resource is not returned when more than one version of the
  referenced resource is available, after the reference has been changed from the original version 1 to some other version.
  When more than one version is available, and the referring resource had previously referred to version 1
  but now refers to version 4, the resource returned in the response bundle is for version 1."
backport: 5.3.2

@@ -0,0 +1,5 @@
---
type: add
issue: 2547
title: "Added a new NUMERIC mdm matcher for matching phone numbers. Also added a NUMERIC phonetic encoder to support
  adding a NUMERIC-encoded search parameter (e.g. if searching for matching phone numbers is required by mdm candidate searching)."

@@ -0,0 +1,4 @@
---
type: fix
issue: 2556
title: "Fixed a bug which would cause Bulk Export to fail when run in a partitioned environment."

@@ -0,0 +1,5 @@
---
type: add
issue: 2560
title: "A new interceptor called `OpenApiInterceptor` has been added. This interceptor can be registered against FHIR servers to
  automatically add support for OpenAPI / Swagger."

@@ -0,0 +1,4 @@
---
type: fix
issue: 2571
title: "Added support for deleting resources to BundleBuilder via the method `addTransactionDeleteEntry`."
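To illustrate the `BundleBuilder` entry above, here is a hedged sketch of building a transaction bundle that deletes a resource. The changelog names only the method, not its signature, so the overload taking a resource type and id is an assumption; check the `BundleBuilder` javadoc before relying on it.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;

public class BundleBuilderDeleteSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      BundleBuilder builder = new BundleBuilder(ctx);

      // Assumed overload taking a resource type and id; the changelog only
      // names the method, not its parameters
      builder.addTransactionDeleteEntry("Patient", "123");

      // The resulting transaction bundle can be posted to a server
      IBaseBundle transactionBundle = builder.getBundle();
      System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(transactionBundle));
   }
}
```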
@@ -0,0 +1,27 @@
---
- item:
    type: "add"
    title: "The versions of a few dependencies have been bumped to the latest versions
      (dependent HAPI modules listed in brackets):
      <ul>
         <li>Commons-Lang3 (Core): 3.9 -> 3.12.0</li>
         <li>Commons-Text (Core): 1.7 -> 1.9</li>
         <li>Commons-Codec (Core): 1.14 -> 1.15</li>
         <li>Commons-IO (Core): 2.6 -> 2.8.0</li>
         <li>Guava (Core): 30.1-jre -> 30.1.1-jre</li>
         <li>Jackson (Core): 2.12.1 -> 2.12.3</li>
         <li>Woodstox (Core): 6.2.3 -> 6.2.5</li>
         <li>Apache Jena (Core/RDF): 3.16.0 -> 3.17.0</li>
         <li>Gson (JPA): 2.8.5 -> 2.8.6</li>
         <li>Caffeine (JPA): 2.7.0 -> 3.0.1</li>
         <li>Hibernate (JPA): 5.4.26.Final -> 5.4.30.Final</li>
         <li>Hibernate Search (JPA): 6.0.0.Final -> 6.0.2.Final</li>
         <li>Spring (JPA): 5.3.3 -> 5.3.6</li>
         <li>Spring Batch (JPA): 4.2.3.RELEASE -> 4.3.2</li>
         <li>Spring Data (JPA): 2.4.2 -> 2.4.7</li>
         <li>Commons DBCP2 (JPA): 2.7.0 -> 2.8.0</li>
         <li>Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE</li>
         <li>JAnsi (CLI): 2.1.1 -> 2.3.2</li>
         <li>JArchivelib (CLI): 1.0.0 -> 1.1.0</li>
      </ul>
      "
@@ -161,3 +161,13 @@ The following example shows how to load all pages of a bundle by fetching each
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleFetcher.java|loadAll}}
```

# Create Composition and Generate Document

This example shows how to generate a Composition resource with two linked resources, then apply the server `$document` operation to generate a document based on this composition.

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/CreateCompositionAndGenerateDocument.java|CreateCompositionAndGenerateDocument}}
```
@@ -47,6 +47,7 @@ page.server_plain.web_testpage_overlay=Web Testpage Overlay
page.server_plain.multitenancy=Multitenancy
page.server_plain.jax_rs=JAX-RS Support
page.server_plain.customizing_the_capabilitystatement=Customizing the CapabilityStatement
page.server_plain.openapi=OpenAPI / Swagger

section.server_jpa.title=JPA Server
page.server_jpa.introduction=Introduction
@@ -196,6 +196,11 @@ Some security audit tools require that servers return an HTTP 405 if an unsuppor
* [BanUnsupportedHttpMethodsInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.html)
* [BanUnsupportedHttpMethodsInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java)

# Server: OpenAPI / Swagger Support

An interceptor can be registered against your server that enables support for OpenAPI (aka Swagger) automatically. See [OpenAPI](/docs/server_plain/openapi.html) for more information.

# Subscription: Subscription Debug Log Interceptor

When using Subscriptions, the debug log interceptor can be used to add a number of additional lines to the server logs showing the internals of the subscription processing pipeline.
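As a rough sketch of wiring the debug log interceptor described above into a server: the `SubscriptionDebugLogInterceptor` class name and its package are assumptions based on this page's description, so verify both against the HAPI FHIR javadocs before use.

```java
// Hedged sketch only: registers the subscription debug log interceptor against
// an existing RestfulServer; the class and package names are assumptions.
import ca.uhn.fhir.jpa.subscription.util.SubscriptionDebugLogInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;

public class SubscriptionDebugLoggingConfig {
   public static void enableDebugLogging(RestfulServer theServer) {
      theServer.registerInterceptor(new SubscriptionDebugLogInterceptor());
   }
}
```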
@@ -292,10 +292,10 @@ The following algorithms are currently supported:
      <td>Gail = Gael, Gail != Gale, Thomas != Tom</td>
   </tr>
   <tr>
      <td>CAVERPHONE1</td>
      <td>CAVERPHONE2</td>
      <td>matcher</td>
      <td>
         <a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone1.html">Apache Caverphone1</a>
         <a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone2.html">Apache Caverphone2</a>
      </td>
      <td>Gail = Gael, Gail = Gale, Thomas != Tom</td>
   </tr>

@@ -379,6 +379,14 @@ The following algorithms are currently supported:
      </td>
      <td>2019-12,Month = 2019-12-19,Day</td>
   </tr>
   <tr>
      <td>NUMERIC</td>
      <td>matcher</td>
      <td>
         Remove all non-numeric characters from the string before comparing.
      </td>
      <td>4169671111 = (416) 967-1111</td>
   </tr>
   <tr>
      <td>NAME_ANY_ORDER</td>
      <td>matcher</td>
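To make the NUMERIC rule in the table above concrete, here is a minimal standalone sketch of the described normalization (strip everything except digits, then compare). It illustrates the documented behavior only; it is not the actual matcher implementation.

```java
// Illustrative sketch: normalizes both inputs by removing all non-numeric
// characters, as the NUMERIC matcher documentation describes.
public class NumericMatchSketch {

   static String digitsOnly(String theInput) {
      return theInput.replaceAll("[^0-9]", "");
   }

   public static void main(String[] args) {
      // "(416) 967-1111" and "4169671111" normalize to the same digit string
      System.out.println(digitsOnly("(416) 967-1111").equals(digitsOnly("4169671111"))); // true
   }
}
```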
@@ -0,0 +1,39 @@
# OpenAPI / Swagger Support

In HAPI FHIR, support for OpenAPI (aka Swagger) is provided by the [OpenApiInterceptor](/hapi-fhir/apidocs/hapi-fhir-server-openapi/ca/uhn/fhir/rest/openapi/OpenApiInterceptor.html).

Note that this interceptor supports servers using the RestfulServer (aka the HAPI FHIR Plain Server and JPA Server), and does not currently support JAX-RS servers.

When this interceptor is registered against the server, it performs the following three tasks:

### System Functionality

* OpenAPI 3.0 documentation will be served at `[baseUrl]/api-docs`. This documentation is generated by the interceptor using information from the server's CapabilityStatement as well as from its automatically generated OperationDefinitions.

### User Functionality

* Any time a user using a browser navigates to the Base URL of the server, they will be automatically redirected to `[baseUrl]/swagger-ui/`

* A customized version of the [Swagger UI](https://swagger.io/tools/swagger-ui/) tool will be served at `[baseUrl]/swagger-ui/`

# Enabling OpenAPI

The HAPI FHIR OpenAPI functionality is supplied in a dedicated module called `hapi-fhir-server-openapi`. To enable this functionality you must first include this module in your project. For example, Maven users should include the following dependency:

```xml
<dependency>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-server-openapi</artifactId>
   <version>VERSION</version>
</dependency>
```

You then simply register the interceptor against your RestfulServer instance:

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ServletExamples.java|OpenApiInterceptor}}
```

# Demonstration

See the HAPI FHIR Test Server for a demonstration of HAPI FHIR OpenAPI functionality: http://hapi.fhir.org/baseR4/swagger-ui/
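As a quick way to see what the interceptor produces, the sketch below fetches the generated OpenAPI document from the public test server mentioned above. The `/api-docs` path comes directly from this page; the use of Java 11's HttpClient is simply one convenient choice.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class FetchOpenApiDocSketch {
   public static void main(String[] args) throws Exception {
      // The interceptor serves the generated OpenAPI 3.0 document at [baseUrl]/api-docs;
      // the base URL below is the public demo server linked above.
      HttpRequest request = HttpRequest.newBuilder()
         .uri(URI.create("http://hapi.fhir.org/baseR4/api-docs"))
         .GET()
         .build();
      HttpResponse<String> response = HttpClient.newHttpClient()
         .send(request, HttpResponse.BodyHandlers.ofString());
      System.out.println(response.body()); // raw OpenAPI JSON
   }
}
```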
@@ -11,7 +11,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
      <version>5.4.0-PRE5-SNAPSHOT</version>
      <version>5.4.0-PRE8-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -36,6 +36,11 @@
         <artifactId>hapi-fhir-server-mdm</artifactId>
         <version>${project.version}</version>
      </dependency>
      <dependency>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir-server-openapi</artifactId>
         <version>${project.version}</version>
      </dependency>
      <dependency>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir-client</artifactId>

@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
      <version>5.4.0-PRE5-SNAPSHOT</version>
      <version>5.4.0-PRE8-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -6,7 +6,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir</artifactId>
      <version>5.4.0-PRE5-SNAPSHOT</version>
      <version>5.4.0-PRE8-SNAPSHOT</version>
      <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
      <version>5.4.0-PRE5-SNAPSHOT</version>
      <version>5.4.0-PRE8-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -67,6 +67,18 @@ public interface IFhirSystemDao<T, MT> extends IDao {
    */
   IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);

   /**
    * Executes a FHIR transaction using a new database transaction. This method must
    * not be called from within a DB transaction.
    */
   T transaction(RequestDetails theRequestDetails, T theResources);

   /**
    * Executes a FHIR transaction nested inside the current database transaction.
    * This form of the transaction processor can handle write operations only (no reads).
    */
   default T transactionNested(RequestDetails theRequestDetails, T theResources) {
      throw new UnsupportedOperationException();
   }

}
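Since the contrast between these two entry points is easy to get wrong, here is a hedged usage sketch. The R4 `Bundle` typing and the constructor-injected DAO are assumptions for illustration; only the two methods declared on the interface above are exercised.

```java
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta;

public class SystemDaoTransactionSketch {

   private final IFhirSystemDao<Bundle, Meta> mySystemDao;

   public SystemDaoTransactionSketch(IFhirSystemDao<Bundle, Meta> theSystemDao) {
      mySystemDao = theSystemDao;
   }

   public Bundle runStandalone(Bundle theTransactionBundle) {
      // Opens its own new database transaction; per the javadoc above, this
      // must NOT be called from within an already-open DB transaction.
      return mySystemDao.transaction(new SystemRequestDetails(), theTransactionBundle);
   }

   public Bundle runNested(Bundle theTransactionBundle) {
      // Joins the current database transaction; this form handles write
      // operations only (no reads).
      return mySystemDao.transactionNested(new SystemRequestDetails(), theTransactionBundle);
   }
}
```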
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
      <version>5.4.0-PRE5-SNAPSHOT</version>
      <version>5.4.0-PRE8-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -577,7 +577,14 @@
         <scope>test</scope>
      </dependency>

      <dependency>
      <dependency>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir-server-openapi</artifactId>
         <version>${project.version}</version>
         <scope>test</scope>
      </dependency>

      <dependency>
         <groupId>com.github.ben-manes.caffeine</groupId>
         <artifactId>caffeine</artifactId>
      </dependency>
@@ -20,17 +20,20 @@ package ca.uhn.fhir.jpa.batch;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

@Configuration
//When you define a new batch job, add it here.
@Import({
   CommonBatchJobConfig.class,
   BulkExportJobConfig.class
   CommonBatchJobConfig.class,
   BulkExportJobConfig.class,
   BulkImportJobConfig.class
})
public class BatchJobsConfig {
   public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
   public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
   public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
   public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.util.ExtensionUtil;
import ca.uhn.fhir.util.HapiExtensions;
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.api;
package ca.uhn.fhir.jpa.bulk.export.api;

/*-
 * #%L

@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.api;
package ca.uhn.fhir.jpa.bulk.export.api;

/*-
 * #%L
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.hl7.fhir.instance.model.api.IIdType;

import javax.transaction.Transactional;

@@ -50,7 +50,7 @@ public interface IBulkDataExportSvc {

   class JobInfo {
      private String myJobId;
      private BulkJobStatusEnum myStatus;
      private BulkExportJobStatusEnum myStatus;
      private List<FileEntry> myFiles;
      private String myRequest;
      private Date myStatusTime;

@@ -90,11 +90,11 @@ public interface IBulkDataExportSvc {

   }

   public BulkJobStatusEnum getStatus() {
   public BulkExportJobStatusEnum getStatus() {
      return myStatus;
   }

   public JobInfo setStatus(BulkJobStatusEnum theStatus) {
   public JobInfo setStatus(BulkExportJobStatusEnum theStatus) {
      myStatus = theStatus;
      return this;
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

@@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
      myPidIterator = getResourcePidIterator();
   }

   abstract Iterator<ResourcePersistentId> getResourcePidIterator();
   protected abstract Iterator<ResourcePersistentId> getResourcePidIterator();

   protected List<SearchParameterMap> createSearchParameterMapsForResourceType() {
      BulkExportJobEntity jobEntity = getJobEntity();
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import org.springframework.batch.core.BatchStatus;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Will run before and after a job to set the status to whatever is appropriate.

@@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener
   public void beforeStep(StepExecution theStepExecution) {
      String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
      if (jobUuid != null) {
         myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING);
         myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING);
      }
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;

@@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
         }
         assert isNotBlank(jobUuid);
         String exitDescription = theStepExecution.getExitStatus().getExitDescription();
         myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription);
         myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription);
      }
      return theStepExecution.getExitStatus();
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;

@@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet {
   @Override
   public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
      if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) {
         myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE);
         myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE);
      } else {
         myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR);
         myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR);
      }
      return RepeatStatus.FINISHED;
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;

@@ -59,6 +57,7 @@ public class BulkExportJobConfig {
   public static final String GROUP_ID_PARAMETER = "groupId";
   public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
   public static final int CHUNK_SIZE = 100;
   public static final String JOB_DESCRIPTION = "jobDescription";

   @Autowired
   private StepBuilderFactory myStepBuilderFactory;

@@ -90,9 +89,9 @@ public class BulkExportJobConfig {
   @Lazy
   public Job bulkExportJob() {
      return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
         .validator(bulkJobParameterValidator())
         .validator(bulkExportJobParameterValidator())
         .start(createBulkExportEntityStep())
         .next(partitionStep())
         .next(bulkExportPartitionStep())
         .next(closeJobStep())
         .build();
   }

@@ -114,7 +113,7 @@ public class BulkExportJobConfig {
   public Job groupBulkExportJob() {
      return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
         .validator(groupBulkJobParameterValidator())
         .validator(bulkJobParameterValidator())
         .validator(bulkExportJobParameterValidator())
         .start(createBulkExportEntityStep())
         .next(groupPartitionStep())
         .next(closeJobStep())

@@ -125,7 +124,7 @@ public class BulkExportJobConfig {
   @Lazy
   public Job patientBulkExportJob() {
      return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
         .validator(bulkJobParameterValidator())
         .validator(bulkExportJobParameterValidator())
         .start(createBulkExportEntityStep())
         .next(patientPartitionStep())
         .next(closeJobStep())

@@ -150,8 +149,9 @@ public class BulkExportJobConfig {
      return new CreateBulkExportEntityTasklet();
   }

   @Bean
   public JobParametersValidator bulkJobParameterValidator() {
   public JobParametersValidator bulkExportJobParameterValidator() {
      return new BulkExportJobParameterValidator();
   }

@@ -159,7 +159,7 @@ public class BulkExportJobConfig {
   @Bean
   public Step groupBulkExportGenerateResourceFilesStep() {
      return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
         .<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .reader(groupBulkItemReader())
         .processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
         .writer(resourceToFileWriter())

@@ -170,17 +170,18 @@ public class BulkExportJobConfig {
   @Bean
   public Step bulkExportGenerateResourceFilesStep() {
      return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep")
         .<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .reader(bulkItemReader())
         .processor(myPidToIBaseResourceProcessor)
         .writer(resourceToFileWriter())
         .listener(bulkExportGenerateResourceFilesStepListener())
         .build();
   }

   @Bean
   public Step patientBulkExportGenerateResourceFilesStep() {
      return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
         .<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
         .reader(patientBulkItemReader())
         .processor(myPidToIBaseResourceProcessor)
         .writer(resourceToFileWriter())

@@ -214,7 +215,7 @@ public class BulkExportJobConfig {
   }

   @Bean
   public Step partitionStep() {
   public Step bulkExportPartitionStep() {
      return myStepBuilderFactory.get("partitionStep")
         .partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
         .step(bulkExportGenerateResourceFilesStep())

@@ -240,7 +241,7 @@ public class BulkExportJobConfig {

   @Bean
   @StepScope
   public GroupBulkItemReader groupBulkItemReader(){
   public GroupBulkItemReader groupBulkItemReader() {
      return new GroupBulkItemReader();
   }

@@ -252,7 +253,7 @@ public class BulkExportJobConfig {

   @Bean
   @StepScope
   public BulkItemReader bulkItemReader(){
   public BulkItemReader bulkItemReader() {
      return new BulkItemReader();
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.Constants;
import org.springframework.batch.core.JobParametersBuilder;
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

@@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader {
   private static final Logger ourLog = Logs.getBatchTroubleshootingLog();

   @Override
   Iterator<ResourcePersistentId> getResourcePidIterator() {
   protected Iterator<ResourcePersistentId> getResourcePidIterator() {
      ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID);
      Set<ResourcePersistentId> myReadPids = new HashSet<>();
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;

@@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
      }
   }

   private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
   public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
      theChunkContext
         .getStepContext()
         .getStepExecution()
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;

@@ -36,7 +37,6 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import com.google.common.collect.Multimaps;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

@@ -81,7 +81,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
   private MdmExpansionCacheSvc myMdmExpansionCacheSvc;

   @Override
   Iterator<ResourcePersistentId> getResourcePidIterator() {
   protected Iterator<ResourcePersistentId> getResourcePidIterator() {
      Set<ResourcePersistentId> myReadPids = new HashSet<>();

      //Short circuit out if we detect we are attempting to extract patients

@@ -119,7 +119,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
      Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);

      if (myMdmEnabled) {
         IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
         SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
         IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd);
         Long pidOrNull = myIdHelperService.getPidOrNull(group);
         List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
         goldenPidSourcePidTuple.forEach(tuple -> {

@@ -179,13 +180,12 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
    * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"])
    */
   private List<String> getMembers() {
      IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
      SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
      IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
      List<IPrimitiveType> evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class);
      return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
   }

   /**
    * Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients.
    * if myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to attempt to also expand it into matched

@@ -195,7 +195,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
    */
   private Set<String> expandAllPatientPidsFromGroup() {
      Set<String> expandedIds = new HashSet<>();
      IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
      SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
      IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
      Long pidOrNull = myIdHelperService.getPidOrNull(group);

      //Attempt to perform MDM Expansion of membership
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;


import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*;
import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*;
import static org.slf4j.LoggerFactory.getLogger;

public class GroupIdPresentValidator implements JobParametersValidator {
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
   }

   @Override
   Iterator<ResourcePersistentId> getResourcePidIterator() {
   protected Iterator<ResourcePersistentId> getResourcePidIterator() {
      if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
         String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
         ourLog.error(errorMessage);
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -25,8 +25,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.BinaryUtil;

@@ -100,7 +101,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
      IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
      binary.setContentType(Constants.CT_FHIR_NDJSON);
      binary.setContent(myOutputStream.toByteArray());
      DaoMethodOutcome outcome = myBinaryDao.create(binary);
      DaoMethodOutcome outcome = myBinaryDao.create(binary, new SystemRequestDetails());
      return outcome.getResource().getIdElement();
   }
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.job;
package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.model;
package ca.uhn.fhir.jpa.bulk.export.model;

/*-
 * #%L

@@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model;
 * #L%
 */

public enum BulkJobStatusEnum {
import com.fasterxml.jackson.annotation.JsonFormat;

@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum BulkExportJobStatusEnum {

   /**
    * Sorting OK!
    */

   SUBMITTED,
   BUILDING,
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.model;
package ca.uhn.fhir.jpa.bulk.export.model;

/*-
 * #%L
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.provider;
package ca.uhn.fhir.jpa.bulk.export.provider;

/*-
 * #%L

@@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider;
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.svc;
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@@ -43,16 +42,13 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobExecutionContext;

@@ -78,9 +74,9 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -136,7 +132,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

      Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
         Pageable page = PageRequest.of(0, 1);
         Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED);
         Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED);
         if (submittedJobs.isEmpty()) {
            return Optional.empty();
         }

@@ -158,7 +154,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
         Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
         if (submittedJobs.isPresent()) {
            BulkExportJobEntity jobEntity = submittedJobs.get();
            jobEntity.setStatus(BulkJobStatusEnum.ERROR);
            jobEntity.setStatus(BulkExportJobStatusEnum.ERROR);
            jobEntity.setStatusMessage(e.getMessage());
            myBulkExportJobDao.save(jobEntity);
         }

@@ -208,8 +204,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
      for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {

         ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
         getBinaryDao().delete(toId(nextFile.getResourceId()));
         getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null);
         getBinaryDao().delete(toId(nextFile.getResourceId()), new SystemRequestDetails());
         getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
         myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());

      }

@@ -344,7 +340,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
      if (useCache) {
         Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
         Pageable page = PageRequest.of(0, 10);
         Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
         Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR);
         if (!existing.isEmpty()) {
            return toSubmittedJobInfo(existing.iterator().next());
         }

@@ -373,7 +369,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

      BulkExportJobEntity job = new BulkExportJobEntity();
      job.setJobId(UUID.randomUUID().toString());
      job.setStatus(BulkJobStatusEnum.SUBMITTED);
      job.setStatus(BulkExportJobStatusEnum.SUBMITTED);
      job.setSince(since);
      job.setCreated(new Date());
      job.setRequest(request);

@@ -445,7 +441,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
      retVal.setStatusMessage(job.getStatusMessage());
      retVal.setRequest(job.getRequest());

      if (job.getStatus() == BulkJobStatusEnum.COMPLETE) {
      if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) {
         for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
            for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
               retVal.addFile()
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.svc;
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.bulk.svc;
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc;
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@@ -84,12 +82,12 @@ public class BulkExportDaoSvc {
   }

   @Transactional
   public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) {
   public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) {
      setJobToStatus(theJobUUID, theStatus, null);
   }

   @Transactional
   public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) {
   public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) {
      Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(theJobUUID);
      if (!oJob.isPresent()) {
         ourLog.error("Job with UUID {} doesn't exist!", theJobUUID);
@@ -0,0 +1,93 @@
package ca.uhn.fhir.jpa.bulk.imprt.api;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;

import javax.annotation.Nonnull;
import java.util.List;

public interface IBulkDataImportSvc {

   /**
    * Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to)
    */
   String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles);

   /**
    * Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state
    *
    * @param theJobId The job ID
    * @param theFiles The files to add to the job
    */
   void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles);

   /**
    * Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING}
    * state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
    * state, meaning that it is a candidate to be picked up for processing
    *
    * @param theJobId The job ID
    */
   void markJobAsReadyForActivation(String theJobId);

   /**
    * This method is intended to be called from the job scheduler, and will begin execution on
    * the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
    *
    * @return Returns {@literal true} if a job was activated
    */
   boolean activateNextReadyJob();

   /**
    * Updates the job status for the given job
    */
   void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus);

   /**
    * Updates the job status for the given job
    */
   void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage);

   /**
    * Gets the number of files available for a given Job ID
    *
    * @param theJobId The job ID
    * @return The file count
    */
   BulkImportJobJson fetchJob(String theJobId);

   /**
    * Fetch a given file by job ID
    *
    * @param theJobId The job ID
    * @param theFileIndex The index of the file within the job
    * @return The file
    */
   BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex);

   /**
    * Delete all input files associated with a particular job
    */
   void deleteJobFiles(String theJobId);
}
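To see how the staging-state methods above chain together, here is a hedged sketch of a caller driving a job from creation to activation. Only methods declared on the interface are used; the surrounding class shape and the pre-built model objects passed in are illustrative assumptions, since `BulkImportJobJson` and `BulkImportJobFileJson` construction is not shown in this changeset.

```java
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;

import java.util.Collections;
import java.util.List;

public class BulkImportLifecycleSketch {

   private final IBulkDataImportSvc myBulkImportSvc;

   public BulkImportLifecycleSketch(IBulkDataImportSvc theBulkImportSvc) {
      myBulkImportSvc = theBulkImportSvc;
   }

   public void stageAndActivate(BulkImportJobJson theJobDescription,
                                List<BulkImportJobFileJson> theInitialFiles,
                                BulkImportJobFileJson theLateArrivingFile) {
      // 1. Create the job in STAGING state with whatever files are available up front
      String jobId = myBulkImportSvc.createNewJob(theJobDescription, theInitialFiles);

      // 2. While the job is still STAGING, more files may be appended
      myBulkImportSvc.addFilesToJob(jobId, Collections.singletonList(theLateArrivingFile));

      // 3. Flip the job to READY so it becomes a candidate for processing...
      myBulkImportSvc.markJobAsReadyForActivation(jobId);

      // 4. ...which the job scheduler normally does by calling activateNextReadyJob()
      boolean activated = myBulkImportSvc.activateNextReadyJob();
   }
}
```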
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Will run before and after a job to set the status to whatever is appropriate.
 */
public class ActivateBulkImportEntityStepListener implements StepExecutionListener {

   @Autowired
   private IBulkDataImportSvc myBulkImportDaoSvc;

   @Override
   public void beforeStep(StepExecution theStepExecution) {
      String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
      if (jobUuid != null) {
         myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
      }
   }

   @Override
   public ExitStatus afterStep(StepExecution theStepExecution) {
      return ExitStatus.EXECUTING;
   }
}
@@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.util.IoUtil;
import com.google.common.io.LineReader;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.io.StringReader;

@SuppressWarnings("UnstableApiUsage")
public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord> {

   @Autowired
   private IBulkDataImportSvc myBulkDataImportSvc;
   @Autowired
   private FhirContext myFhirContext;
   @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUuid;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
   private int myFileIndex;

   private StringReader myReader;
   private LineReader myLineReader;
   private int myLineIndex;
   private String myTenantName;

   @Override
   public ParsedBulkImportRecord read() throws Exception {

      if (myReader == null) {
         BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex);
         myTenantName = file.getTenantName();
         myReader = new StringReader(file.getContents());
         myLineReader = new LineReader(myReader);
      }

      String nextLine = myLineReader.readLine();
      if (nextLine == null) {
         IoUtil.closeQuietly(myReader);
         return null;
      }

      Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid);

      IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine);
      return new ParsedBulkImportRecord(myTenantName, parsed);
   }
}
@ -0,0 +1,74 @@
|
|||
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.List;

public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {

   private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
   @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUuid;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
   private int myFileIndex;
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}")
   private JobFileRowProcessingModeEnum myRowProcessingMode;
   @Autowired
   private DaoRegistry myDaoRegistry;

   @SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"})
   @Override
   public void write(List<? extends ParsedBulkImportRecord> theItemLists) throws Exception {
      ourLog.info("Beginning bulk import write of {} rows for Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex);

      for (ParsedBulkImportRecord nextItem : theItemLists) {

         SystemRequestDetails requestDetails = new SystemRequestDetails();
         requestDetails.setTenantId(nextItem.getTenantName());

         // Only FHIR_TRANSACTION is implemented so far; additional row processing modes will follow
         switch (myRowProcessingMode) {
            default:
            case FHIR_TRANSACTION:
               IFhirSystemDao systemDao = myDaoRegistry.getSystemDao();
               IBaseResource inputBundle = nextItem.getRowContent();
               systemDao.transactionNested(requestDetails, inputBundle);
               break;
         }

      }

   }

}
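
Since the writer above submits each row through transactionNested(), a row in FHIR_TRANSACTION mode is itself a FHIR transaction Bundle. A hedged sketch of building and serializing one such row follows; the class name and resource content are invented, and R4 structures are assumed.

// Editor's sketch (not part of the commit): what one FHIR_TRANSACTION row can look like.
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;

public class TransactionRowSketch {
   public static void main(String[] theArgs) {
      Patient patient = new Patient();
      patient.addName().setFamily("Example");

      // A transaction Bundle with a single POST entry.
      Bundle row = new Bundle();
      row.setType(Bundle.BundleType.TRANSACTION);
      row.addEntry()
         .setResource(patient)
         .getRequest()
         .setMethod(Bundle.HTTPVerb.POST)
         .setUrl("Patient");

      // Serialized onto a single line, this is one NDJSON row of a bulk import file.
      System.out.println(FhirContext.forR4().newJsonParser().encodeResourceToString(row));
   }
}
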
@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
/**
 * Runs as the final step of a bulk import job, marking the job COMPLETE if the
 * run succeeded and ERROR otherwise, and deleting the staged job files either way.
 */
public class BulkImportJobCloser implements Tasklet {

   @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUUID;

   @Autowired
   private IBulkDataImportSvc myBulkDataImportSvc;

   @Override
   public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
      BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus();
      if (executionStatus == BatchStatus.STARTED) {
         myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE);
         myBulkDataImportSvc.deleteJobFiles(myJobUUID);
      } else {
         myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus);
         myBulkDataImportSvc.deleteJobFiles(myJobUUID);
      }
      return RepeatStatus.FINISHED;
   }
}
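
Worth noting: the closer binds its UUID from jobParameters, which is set once at launch, while the reader and writer bind theirs from stepExecutionContext, which BulkImportPartitioner populates per partition. A compile-only sketch of the two binding styles side by side; the class and field names are illustrative only.

// Editor's sketch: the two SpEL binding styles used by these classes.
package ca.uhn.fhir.jpa.bulk.imprt.job;

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import org.springframework.beans.factory.annotation.Value;

public class ScopeBindingSketch {

   // Job-wide binding: resolved from the JobParameters supplied at launch.
   // Requires a @JobScope (or @StepScope) bean, as with bulkImportJobCloser() below.
   @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
   private String myJobUuid;

   // Per-partition binding: resolved from the ExecutionContext that
   // BulkImportPartitioner builds for each file. Requires a @StepScope bean.
   @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
   private int myFileIndex;
}
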
@ -0,0 +1,169 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.repeat.CompletionPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;
/**
 * Spring Batch job configuration. Contains all the plumbing needed to run a
 * Bulk Import job.
 */
@Configuration
public class BulkImportJobConfig {

   public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval";

   @Autowired
   private StepBuilderFactory myStepBuilderFactory;

   @Autowired
   private JobBuilderFactory myJobBuilderFactory;

   @Autowired
   @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
   private TaskExecutor myTaskExecutor;

   @Bean(name = BULK_IMPORT_JOB_NAME)
   @Lazy
   public Job bulkImportJob() throws Exception {
      return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME)
         .validator(bulkImportJobParameterValidator())
         .start(bulkImportPartitionStep())
         .next(bulkImportCloseJobStep())
         .build();
   }

   @Bean
   public JobParametersValidator bulkImportJobParameterValidator() {
      return new BulkImportJobParameterValidator();
   }

   @Bean
   public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() {
      return new CreateBulkImportEntityTasklet();
   }

   @Bean
   @JobScope
   public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() {
      return new ActivateBulkImportEntityStepListener();
   }

   @Bean
   public Step bulkImportPartitionStep() throws Exception {
      return myStepBuilderFactory.get("bulkImportPartitionStep")
         .partitioner("bulkImportPartitionStep", bulkImportPartitioner())
         .partitionHandler(partitionHandler())
         .listener(activateBulkImportEntityStepListener())
         .gridSize(10)
         .build();
   }

   private PartitionHandler partitionHandler() throws Exception {
      assert myTaskExecutor != null;

      TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler();
      retVal.setStep(bulkImportProcessFilesStep());
      retVal.setTaskExecutor(myTaskExecutor);
      retVal.afterPropertiesSet();
      return retVal;
   }

   @Bean
   public Step bulkImportCloseJobStep() {
      return myStepBuilderFactory.get("bulkImportCloseJobStep")
         .tasklet(bulkImportJobCloser())
         .build();
   }

   @Bean
   @JobScope
   public BulkImportJobCloser bulkImportJobCloser() {
      return new BulkImportJobCloser();
   }

   @Bean
   @JobScope
   public BulkImportPartitioner bulkImportPartitioner() {
      return new BulkImportPartitioner();
   }

   @Bean
   public Step bulkImportProcessFilesStep() {
      CompletionPolicy completionPolicy = completionPolicy();

      return myStepBuilderFactory.get("bulkImportProcessFilesStep")
         .<ParsedBulkImportRecord, ParsedBulkImportRecord>chunk(completionPolicy)
         .reader(bulkImportFileReader())
         .writer(bulkImportFileWriter())
         .listener(bulkImportStepListener())
         .listener(completionPolicy)
         .build();
   }

   @Bean
   @StepScope
   public CompletionPolicy completionPolicy() {
      return new BulkImportProcessStepCompletionPolicy();
   }

   @Bean
   @StepScope
   public ItemWriter<ParsedBulkImportRecord> bulkImportFileWriter() {
      return new BulkImportFileWriter();
   }

   @Bean
   @StepScope
   public BulkImportFileReader bulkImportFileReader() {
      return new BulkImportFileReader();
   }

   @Bean
   @StepScope
   public BulkImportStepListener bulkImportStepListener() {
      return new BulkImportStepListener();
   }

}
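
To tie the configuration together, here is a hedged sketch of launching the job by hand with a plain Spring Batch JobLauncher. In practice HAPI FHIR drives this through its own services; the class name is invented and the numeric type of the commit-interval parameter is an assumption.

// Editor's sketch: illustrative wiring, not the commit's own launch path.
package ca.uhn.fhir.jpa.bulk.imprt.job;

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;

public class BulkImportLaunchSketch {

   // theBulkImportJob is the bean registered above under BULK_IMPORT_JOB_NAME.
   public JobExecution launch(JobLauncher theJobLauncher, Job theBulkImportJob, String theJobUuid) throws Exception {
      JobParameters params = new JobParametersBuilder()
         .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid)
         // Assumed numeric; consumed by the step's completion policy.
         .addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, 100L)
         .build();
      return theJobLauncher.run(theBulkImportJob, params);
   }
}
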
Some files were not shown because too many files have changed in this diff.