Syncing master into working branch.

This commit is contained in:
Diederik Muylwyk 2019-08-01 13:51:26 -04:00
commit d67de13708
43 changed files with 2481 additions and 248 deletions

View File

@ -69,6 +69,12 @@
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId> <artifactId>commons-lang3</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<optional>true</optional>
<type>pom</type>
</dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId> <artifactId>commons-text</artifactId>

View File

@ -20,6 +20,7 @@ import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.VersionUtil; import ca.uhn.fhir.util.VersionUtil;
import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.FhirValidator;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.jena.riot.Lang;
import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
@ -111,7 +112,7 @@ public class FhirContext {
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()} * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/ */
@Deprecated @Deprecated
public FhirContext(Class<? extends IBaseResource> theResourceType) { public FhirContext(final Class<? extends IBaseResource> theResourceType) {
this(toCollection(theResourceType)); this(toCollection(theResourceType));
} }
@ -120,7 +121,7 @@ public class FhirContext {
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()} * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/ */
@Deprecated @Deprecated
public FhirContext(Class<?>... theResourceTypes) { public FhirContext(final Class<?>... theResourceTypes) {
this(toCollection(theResourceTypes)); this(toCollection(theResourceTypes));
} }
@ -129,7 +130,7 @@ public class FhirContext {
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()} * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/ */
@Deprecated @Deprecated
public FhirContext(Collection<Class<? extends IBaseResource>> theResourceTypes) { public FhirContext(final Collection<Class<? extends IBaseResource>> theResourceTypes) {
this(null, theResourceTypes); this(null, theResourceTypes);
} }
@ -138,11 +139,11 @@ public class FhirContext {
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}, but * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}, but
* this method can also be used if you wish to supply the version programmatically. * this method can also be used if you wish to supply the version programmatically.
*/ */
public FhirContext(FhirVersionEnum theVersion) { public FhirContext(final FhirVersionEnum theVersion) {
this(theVersion, null); this(theVersion, null);
} }
private FhirContext(FhirVersionEnum theVersion, Collection<Class<? extends IBaseResource>> theResourceTypes) { private FhirContext(final FhirVersionEnum theVersion, final Collection<Class<? extends IBaseResource>> theResourceTypes) {
VersionUtil.getVersion(); VersionUtil.getVersion();
if (theVersion != null) { if (theVersion != null) {
@ -191,7 +192,7 @@ public class FhirContext {
} }
private String createUnknownResourceNameError(String theResourceName, FhirVersionEnum theVersion) { private String createUnknownResourceNameError(final String theResourceName, final FhirVersionEnum theVersion) {
return getLocalizer().getMessage(FhirContext.class, "unknownResourceName", theResourceName, theVersion); return getLocalizer().getMessage(FhirContext.class, "unknownResourceName", theResourceName, theVersion);
} }
@ -232,7 +233,7 @@ public class FhirContext {
* *
* @param theAddProfileTagWhenEncoding The add profile mode (must not be <code>null</code>) * @param theAddProfileTagWhenEncoding The add profile mode (must not be <code>null</code>)
*/ */
public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) { public void setAddProfileTagWhenEncoding(final AddProfileTagEnum theAddProfileTagWhenEncoding) {
Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null"); Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding; myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
} }
@ -247,7 +248,7 @@ public class FhirContext {
* *
* @see #setDefaultTypeForProfile(String, Class) * @see #setDefaultTypeForProfile(String, Class)
*/ */
public Class<? extends IBaseResource> getDefaultTypeForProfile(String theProfile) { public Class<? extends IBaseResource> getDefaultTypeForProfile(final String theProfile) {
validateInitialized(); validateInitialized();
return myDefaultTypeForProfile.get(theProfile); return myDefaultTypeForProfile.get(theProfile);
} }
@ -257,7 +258,7 @@ public class FhirContext {
* for extending the core library. * for extending the core library.
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public BaseRuntimeElementDefinition<?> getElementDefinition(Class<? extends IBase> theElementType) { public BaseRuntimeElementDefinition<?> getElementDefinition(final Class<? extends IBase> theElementType) {
validateInitialized(); validateInitialized();
BaseRuntimeElementDefinition<?> retVal = myClassToElementDefinition.get(theElementType); BaseRuntimeElementDefinition<?> retVal = myClassToElementDefinition.get(theElementType);
if (retVal == null) { if (retVal == null) {
@ -273,7 +274,7 @@ public class FhirContext {
* Note that this method is case insensitive! * Note that this method is case insensitive!
* </p> * </p>
*/ */
public BaseRuntimeElementDefinition<?> getElementDefinition(String theElementName) { public BaseRuntimeElementDefinition<?> getElementDefinition(final String theElementName) {
validateInitialized(); validateInitialized();
return myNameToElementDefinition.get(theElementName.toLowerCase()); return myNameToElementDefinition.get(theElementName.toLowerCase());
} }
@ -301,7 +302,7 @@ public class FhirContext {
* This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with * This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
* caution * caution
*/ */
public void setLocalizer(HapiLocalizer theMessages) { public void setLocalizer(final HapiLocalizer theMessages) {
myLocalizer = theMessages; myLocalizer = theMessages;
} }
@ -309,7 +310,7 @@ public class FhirContext {
return myNarrativeGenerator; return myNarrativeGenerator;
} }
public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) { public void setNarrativeGenerator(final INarrativeGenerator theNarrativeGenerator) {
myNarrativeGenerator = theNarrativeGenerator; myNarrativeGenerator = theNarrativeGenerator;
} }
@ -329,7 +330,7 @@ public class FhirContext {
* *
* @param theParserOptions The parser options object - Must not be <code>null</code> * @param theParserOptions The parser options object - Must not be <code>null</code>
*/ */
public void setParserOptions(ParserOptions theParserOptions) { public void setParserOptions(final ParserOptions theParserOptions) {
Validate.notNull(theParserOptions, "theParserOptions must not be null"); Validate.notNull(theParserOptions, "theParserOptions must not be null");
myParserOptions = theParserOptions; myParserOptions = theParserOptions;
} }
@ -360,7 +361,7 @@ public class FhirContext {
* *
* @see PerformanceOptionsEnum for a list of available options * @see PerformanceOptionsEnum for a list of available options
*/ */
public void setPerformanceOptions(Collection<PerformanceOptionsEnum> theOptions) { public void setPerformanceOptions(final Collection<PerformanceOptionsEnum> theOptions) {
myPerformanceOptions.clear(); myPerformanceOptions.clear();
if (theOptions != null) { if (theOptions != null) {
myPerformanceOptions.addAll(theOptions); myPerformanceOptions.addAll(theOptions);
@ -371,7 +372,7 @@ public class FhirContext {
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed * Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library. * for extending the core library.
*/ */
public RuntimeResourceDefinition getResourceDefinition(Class<? extends IBaseResource> theResourceType) { public RuntimeResourceDefinition getResourceDefinition(final Class<? extends IBaseResource> theResourceType) {
validateInitialized(); validateInitialized();
if (theResourceType == null) { if (theResourceType == null) {
throw new NullPointerException("theResourceType can not be null"); throw new NullPointerException("theResourceType can not be null");
@ -387,7 +388,7 @@ public class FhirContext {
return retVal; return retVal;
} }
public RuntimeResourceDefinition getResourceDefinition(FhirVersionEnum theVersion, String theResourceName) { public RuntimeResourceDefinition getResourceDefinition(final FhirVersionEnum theVersion, final String theResourceName) {
Validate.notNull(theVersion, "theVersion can not be null"); Validate.notNull(theVersion, "theVersion can not be null");
validateInitialized(); validateInitialized();
@ -419,7 +420,7 @@ public class FhirContext {
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed * Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library. * for extending the core library.
*/ */
public RuntimeResourceDefinition getResourceDefinition(IBaseResource theResource) { public RuntimeResourceDefinition getResourceDefinition(final IBaseResource theResource) {
validateInitialized(); validateInitialized();
Validate.notNull(theResource, "theResource must not be null"); Validate.notNull(theResource, "theResource must not be null");
return getResourceDefinition(theResource.getClass()); return getResourceDefinition(theResource.getClass());
@ -434,7 +435,7 @@ public class FhirContext {
* *
* @throws DataFormatException If the resource name is not known * @throws DataFormatException If the resource name is not known
*/ */
public RuntimeResourceDefinition getResourceDefinition(String theResourceName) throws DataFormatException { public RuntimeResourceDefinition getResourceDefinition(final String theResourceName) throws DataFormatException {
validateInitialized(); validateInitialized();
Validate.notBlank(theResourceName, "theResourceName must not be blank"); Validate.notBlank(theResourceName, "theResourceName must not be blank");
@ -462,7 +463,7 @@ public class FhirContext {
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed * Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library. * for extending the core library.
*/ */
public RuntimeResourceDefinition getResourceDefinitionById(String theId) { public RuntimeResourceDefinition getResourceDefinitionById(final String theId) {
validateInitialized(); validateInitialized();
return myIdToResourceDefinition.get(theId); return myIdToResourceDefinition.get(theId);
} }
@ -528,7 +529,7 @@ public class FhirContext {
* *
* @param theRestfulClientFactory * @param theRestfulClientFactory
*/ */
public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) { public void setRestfulClientFactory(final IRestfulClientFactory theRestfulClientFactory) {
Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null"); Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
this.myRestfulClientFactory = theRestfulClientFactory; this.myRestfulClientFactory = theRestfulClientFactory;
} }
@ -619,6 +620,23 @@ public class FhirContext {
return new JsonParser(this, myParserErrorHandler); return new JsonParser(this, myParserErrorHandler);
} }
/**
 * Create and return a new RDF parser.
 *
 * <p>
 * Thread safety: <b>Parsers are not guaranteed to be thread safe</b>. Create a new parser instance for every thread
 * or every message being parsed/encoded.
 * </p>
 * <p>
 * Performance Note: <b>This method is cheap</b> to call, and may be called once for every message being processed
 * without incurring any performance penalty
 * </p>
 *
 * @return a new RDF parser which serializes using the Turtle language and the error handler
 *         configured on this context
 */
public IParser newRDFParser() {
	return new RDFParser(this, myParserErrorHandler, Lang.TURTLE);
}
/** /**
* Instantiates a new client instance. This method requires an interface which is defined specifically for your use * Instantiates a new client instance. This method requires an interface which is defined specifically for your use
* cases to contain methods for each of the RESTful operations you wish to implement (e.g. "read ImagingStudy", * cases to contain methods for each of the RESTful operations you wish to implement (e.g. "read ImagingStudy",
@ -637,7 +655,7 @@ public class FhirContext {
* @return A newly created client * @return A newly created client
* @throws ConfigurationException If the interface type is not an interface * @throws ConfigurationException If the interface type is not an interface
*/ */
public <T extends IRestfulClient> T newRestfulClient(Class<T> theClientType, String theServerBase) { public <T extends IRestfulClient> T newRestfulClient(final Class<T> theClientType, final String theServerBase) {
return getRestfulClientFactory().newClient(theClientType, theServerBase); return getRestfulClientFactory().newClient(theClientType, theServerBase);
} }
@ -653,7 +671,7 @@ public class FhirContext {
* *
* @param theServerBase The URL of the base for the restful FHIR server to connect to * @param theServerBase The URL of the base for the restful FHIR server to connect to
*/ */
public IGenericClient newRestfulGenericClient(String theServerBase) { public IGenericClient newRestfulGenericClient(final String theServerBase) {
return getRestfulClientFactory().newGenericClient(theServerBase); return getRestfulClientFactory().newGenericClient(theServerBase);
} }
@ -704,7 +722,7 @@ public class FhirContext {
* *
* @param theType The custom type to add (must not be <code>null</code>) * @param theType The custom type to add (must not be <code>null</code>)
*/ */
public void registerCustomType(Class<? extends IBase> theType) { public void registerCustomType(final Class<? extends IBase> theType) {
Validate.notNull(theType, "theType must not be null"); Validate.notNull(theType, "theType must not be null");
ensureCustomTypeList(); ensureCustomTypeList();
@ -723,7 +741,7 @@ public class FhirContext {
* *
* @param theTypes The custom types to add (must not be <code>null</code> or contain null elements in the collection) * @param theTypes The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*/ */
public void registerCustomTypes(Collection<Class<? extends IBase>> theTypes) { public void registerCustomTypes(final Collection<Class<? extends IBase>> theTypes) {
Validate.notNull(theTypes, "theTypes must not be null"); Validate.notNull(theTypes, "theTypes must not be null");
Validate.noNullElements(theTypes.toArray(), "theTypes must not contain any null elements"); Validate.noNullElements(theTypes.toArray(), "theTypes must not contain any null elements");
@ -732,22 +750,22 @@ public class FhirContext {
myCustomTypes.addAll(theTypes); myCustomTypes.addAll(theTypes);
} }
private BaseRuntimeElementDefinition<?> scanDatatype(Class<? extends IElement> theResourceType) { private BaseRuntimeElementDefinition<?> scanDatatype(final Class<? extends IElement> theResourceType) {
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<>(); ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<Class<? extends IElement>>();
resourceTypes.add(theResourceType); resourceTypes.add(theResourceType);
Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes); Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes);
return defs.get(theResourceType); return defs.get(theResourceType);
} }
private RuntimeResourceDefinition scanResourceType(Class<? extends IBaseResource> theResourceType) { private RuntimeResourceDefinition scanResourceType(final Class<? extends IBaseResource> theResourceType) {
ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<>(); ArrayList<Class<? extends IElement>> resourceTypes = new ArrayList<Class<? extends IElement>>();
resourceTypes.add(theResourceType); resourceTypes.add(theResourceType);
Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes); Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> defs = scanResourceTypes(resourceTypes);
return (RuntimeResourceDefinition) defs.get(theResourceType); return (RuntimeResourceDefinition) defs.get(theResourceType);
} }
private synchronized Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> scanResourceTypes(Collection<Class<? extends IElement>> theResourceTypes) { private synchronized Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> scanResourceTypes(final Collection<Class<? extends IElement>> theResourceTypes) {
List<Class<? extends IBase>> typesToScan = new ArrayList<>(); List<Class<? extends IBase>> typesToScan = new ArrayList<Class<? extends IBase>>();
if (theResourceTypes != null) { if (theResourceTypes != null) {
typesToScan.addAll(theResourceTypes); typesToScan.addAll(theResourceTypes);
} }
@ -819,7 +837,7 @@ public class FhirContext {
* <code>null</code> or empty. * <code>null</code> or empty.
* @param theClass The resource type, or <code>null</code> to clear any existing type * @param theClass The resource type, or <code>null</code> to clear any existing type
*/ */
public void setDefaultTypeForProfile(String theProfile, Class<? extends IBaseResource> theClass) { public void setDefaultTypeForProfile(final String theProfile, final Class<? extends IBaseResource> theClass) {
Validate.notBlank(theProfile, "theProfile must not be null or empty"); Validate.notBlank(theProfile, "theProfile must not be null or empty");
if (theClass == null) { if (theClass == null) {
myDefaultTypeForProfile.remove(theProfile); myDefaultTypeForProfile.remove(theProfile);
@ -833,7 +851,7 @@ public class FhirContext {
* *
* @param theParserErrorHandler The error handler * @param theParserErrorHandler The error handler
*/ */
public void setParserErrorHandler(IParserErrorHandler theParserErrorHandler) { public void setParserErrorHandler(final IParserErrorHandler theParserErrorHandler) {
Validate.notNull(theParserErrorHandler, "theParserErrorHandler must not be null"); Validate.notNull(theParserErrorHandler, "theParserErrorHandler must not be null");
myParserErrorHandler = theParserErrorHandler; myParserErrorHandler = theParserErrorHandler;
} }
@ -843,7 +861,7 @@ public class FhirContext {
* *
* @see PerformanceOptionsEnum for a list of available options * @see PerformanceOptionsEnum for a list of available options
*/ */
public void setPerformanceOptions(PerformanceOptionsEnum... thePerformanceOptions) { public void setPerformanceOptions(final PerformanceOptionsEnum... thePerformanceOptions) {
Collection<PerformanceOptionsEnum> asList = null; Collection<PerformanceOptionsEnum> asList = null;
if (thePerformanceOptions != null) { if (thePerformanceOptions != null) {
asList = Arrays.asList(thePerformanceOptions); asList = Arrays.asList(thePerformanceOptions);
@ -852,7 +870,7 @@ public class FhirContext {
} }
@SuppressWarnings({"cast"}) @SuppressWarnings({"cast"})
private List<Class<? extends IElement>> toElementList(Collection<Class<? extends IBaseResource>> theResourceTypes) { private List<Class<? extends IElement>> toElementList(final Collection<Class<? extends IBaseResource>> theResourceTypes) {
if (theResourceTypes == null) { if (theResourceTypes == null) {
return null; return null;
} }
@ -922,8 +940,8 @@ public class FhirContext {
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private static List<Class<? extends IBaseResource>> toCollection(Class<?>[] theResourceTypes) { private static List<Class<? extends IBaseResource>> toCollection(final Class<?>[] theResourceTypes) {
ArrayList<Class<? extends IBaseResource>> retVal = new ArrayList<>(1); ArrayList<Class<? extends IBaseResource>> retVal = new ArrayList<Class<? extends IBaseResource>>(1);
for (Class<?> clazz : theResourceTypes) { for (Class<?> clazz : theResourceTypes) {
if (!IResource.class.isAssignableFrom(clazz)) { if (!IResource.class.isAssignableFrom(clazz)) {
throw new IllegalArgumentException(clazz.getCanonicalName() + " is not an instance of " + IResource.class.getSimpleName()); throw new IllegalArgumentException(clazz.getCanonicalName() + " is not an instance of " + IResource.class.getSimpleName());

View File

@ -0,0 +1,542 @@
package ca.uhn.fhir.parser;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ISupportsUndeclaredExtensions;
import ca.uhn.fhir.narrative.INarrativeGenerator;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.util.ElementUtil;
import ca.uhn.fhir.util.rdf.RDFUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.system.StreamRDF;
import org.hl7.fhir.instance.model.api.*;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This class is the FHIR RDF parser/encoder. Users should not interact with this class directly, but should use
* {@link FhirContext#newRDFParser()} to get an instance.
*/
public class RDFParser extends BaseParser {
private static final String FHIR_NS = "http://hl7.org/fhir";
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RDFParser.class);
private FhirContext context;
private Lang lang;
/**
 * Do not use this constructor, the recommended way to obtain a new instance of the RDF parser is to invoke
 * {@link FhirContext#newRDFParser()}.
 *
 * @param context            the FHIR context used to resolve resource and element definitions
 * @param parserErrorHandler the Parser Error Handler
 * @param lang               the Jena serialization language (e.g. Turtle) used for both parsing and encoding
 */
public RDFParser(final FhirContext context, final IParserErrorHandler parserErrorHandler, final Lang lang) {
	super(context, parserErrorHandler);
	// Kept locally (in addition to the copy held by BaseParser) for direct definition lookups.
	this.context = context;
	this.lang = lang;
}
/**
 * Serializes the given resource as RDF onto the supplied writer.
 *
 * @param resource      the resource to encode
 * @param writer        the destination character stream
 * @param encodeContext bookkeeping state for the current encode pass
 */
@Override
protected void doEncodeResourceToWriter(final IBaseResource resource,
                                        final Writer writer,
                                        final EncodeContext encodeContext) {
	// Build a streaming RDF writer for the configured serialization and anchor it at the FHIR namespace.
	final StreamRDF rdfWriter = RDFUtil.createRDFWriter(writer, this.lang);
	rdfWriter.base(FHIR_NS);
	encodeResourceToRDFStreamWriter(resource, rdfWriter, encodeContext);
}
/**
 * Parses RDF input from the given reader into a resource of the requested type.
 *
 * @param resourceType the concrete resource class to instantiate
 * @param reader       the character stream containing RDF in this parser's configured language
 * @return the parsed resource
 * @throws DataFormatException if the input cannot be parsed
 */
@Override
protected <T extends IBaseResource> T doParseResource(final Class<T> resourceType,
                                                      final Reader reader) throws DataFormatException {
	// Build a streaming RDF reader for the configured serialization, anchored at the FHIR namespace.
	final StreamRDF rdfSource = RDFUtil.createRDFReader(reader, this.lang);
	rdfSource.base(FHIR_NS);
	return parseResource(resourceType, rdfSource);
}
/** Identifies this parser's wire format. Always {@link EncodingEnum#RDF}, regardless of the configured Jena Lang. */
@Override
public EncodingEnum getEncoding() {
	return EncodingEnum.RDF;
}
/**
 * No-op for the RDF parser: the pretty-print flag is ignored and {@code this} is returned
 * for fluent chaining. NOTE(review): callers who rely on pretty-printed output will silently
 * get the default serialization — confirm this is intentional.
 */
@Override
public IParser setPrettyPrint(final boolean prettyPrint) {
	return this;
}
/**
 * Core encode routine: writes the resource (and, unless already contained, its contained
 * resources) to the RDF stream. The HL7.org-structure branch and the DSTU2+ branch differ
 * in how the resource id and extensions are emitted.
 *
 * @param resource          the resource to encode
 * @param streamWriter      the RDF event sink
 * @param containedResource true when this resource is being written as a contained resource
 * @param resourceId        the id to emit, or null to omit the id
 * @param encodeContext     bookkeeping state for the current encode pass
 */
private void encodeResourceToRDFStreamWriter(final IBaseResource resource,
                                             final StreamRDF streamWriter,
                                             final boolean containedResource,
                                             final IIdType resourceId,
                                             final EncodeContext encodeContext) {
	RuntimeResourceDefinition resDef = this.context.getResourceDefinition(resource);
	if (resDef == null) {
		throw new ConfigurationException("Unknown resource type: " + resource.getClass());
	}
	if (!containedResource) {
		// Gather contained resources once at the top level; contained ones were already gathered.
		super.containResourcesForEncoding(resource);
	}
	if (resource instanceof IAnyResource) {
		// HL7.org Structures
		if (resourceId != null) {
			writeCommentsPre(streamWriter, resourceId);
			streamWriter.start();
			streamWriter.triple(RDFUtil.triple("<value> " + resourceId.getIdPart() + " </value>"));
			streamWriter.finish();
			writeCommentsPost(streamWriter, resourceId);
		}
		encodeCompositeElementToStreamWriter(resource, resource, streamWriter, containedResource, new CompositeChildElement(resDef, encodeContext), encodeContext);
	} else {
		// DSTU2+
		if (resourceId != null) {
			// NOTE(review): unlike the HL7.org branch, this branch passes a literal false (not
			// containedResource) to encodeExtensionsIfPresent and skips writeCommentsPre — confirm intended.
			streamWriter.start();
			streamWriter.triple(RDFUtil.triple("<value> " + resourceId.getIdPart() + " </value>"));
			encodeExtensionsIfPresent(resource, streamWriter, resourceId, false, encodeContext);
			streamWriter.finish();
			writeCommentsPost(streamWriter, resourceId);
		}
		/*
		InstantDt updated = (InstantDt) resource.getResourceMetadata().get(ResourceMetadataKeyEnum.UPDATED);
		IdDt idDt = resource.getId();
		String versionIdPart = idDt.getVersionIdPart();
		if (isBlank(versionIdPart)) {
			versionIdPart = ResourceMetadataKeyEnum.VERSION.get(resource);
		}
		List<BaseCodingDt> securityLabels = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.SECURITY_LABELS);
		List<? extends IIdType> profiles = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.PROFILES);
		profiles = super.getProfileTagsForEncoding(resource, profiles);
		TagList tags = getMetaTagsForEncoding((resource), encodeContext);
		if (!ElementUtil.isEmpty(versionIdPart, updated, securityLabels, tags, profiles)) {
			streamWriter.start();
			for (IIdType profile : profiles) {
				streamWriter.start();
				streamWriter.triple(RDFUtil.triple("<value> " + profile.getValue() + " </value>"));
				streamWriter.finish();
			}
			for (BaseCodingDt securityLabel : securityLabels) {
				streamWriter.start();
				encodeCompositeElementToStreamWriter(resource, securityLabel, streamWriter, containedResource, null, encodeContext);
				streamWriter.finish();
			}
			if (tags != null) {
				for (Tag tag : tags) {
					if (tag.isEmpty()) {
						continue;
					}
					streamWriter.start();
					streamWriter.triple(RDFUtil.triple("<system> " + tag.getScheme() + " </system>"));
					streamWriter.triple(RDFUtil.triple("<code> " + tag.getTerm() + " </code>"));
					streamWriter.triple(RDFUtil.triple("<display> " + tag.getLabel() + " </display>"));
					streamWriter.finish();
				}
			}
			streamWriter.finish();
		}
		*/
		if (resource instanceof IBaseBinary) {
			// Binary resources serialize as contentType + base64 content rather than as a composite.
			IBaseBinary bin = (IBaseBinary) resource;
			streamWriter.triple(RDFUtil.triple("<contentType> " + bin.getContentType() + " </contentType>"));
			streamWriter.triple(RDFUtil.triple("<content> " + bin.getContentAsBase64() + " </content>"));
		} else {
			encodeCompositeElementToStreamWriter(resource, resource, streamWriter, containedResource, new CompositeChildElement(resDef, encodeContext), encodeContext);
		}
	}
	streamWriter.finish();
}
/**
 * Emits any "pre" format comments attached to the element, skipping blank entries.
 * NOTE(review): comments are funneled through {@link StreamRDF#base} — confirm this is the
 * intended sink for comment text.
 */
private void writeCommentsPre(final StreamRDF eventWriter, final IBase element) {
	if (element == null || !element.hasFormatComment()) {
		return;
	}
	for (final String comment : element.getFormatCommentsPre()) {
		if (isNotBlank(comment)) {
			eventWriter.base(comment);
		}
	}
}
/**
 * Emits any "post" format comments attached to the element, skipping blank entries.
 * NOTE(review): comments are funneled through {@link StreamRDF#base} — confirm this is the
 * intended sink for comment text.
 */
private void writeCommentsPost(final StreamRDF eventWriter, final IBase element) {
	if (element == null || !element.hasFormatComment()) {
		return;
	}
	for (final String comment : element.getFormatCommentsPost()) {
		if (isNotBlank(comment)) {
			eventWriter.base(comment);
		}
	}
}
/**
 * Writes a single child element to the RDF stream, dispatching on the child's runtime type
 * (id, primitive, composite, contained resource, nested resource). Maintains the encode
 * context path around the write via push/pop in a try/finally.
 *
 * @param resource         the resource being encoded (used for extension lookups)
 * @param eventWriter      the RDF event sink
 * @param childDefinition  runtime definition of the child slot being written
 * @param element          the value occupying the slot (may be null/empty)
 * @param childName        the element name for this occurrence
 * @param childDef         runtime definition of the value's element type
 * @param extensionUrl     extension url to emit for composite children, or null
 * @param includedResource true when encoding inside a contained/included resource
 * @param parent           composite-child chain used for composite recursion (may be null)
 * @param encodeContext    bookkeeping state for the current encode pass
 */
private void encodeChildElementToStreamWriter(final IBaseResource resource,
                                              final StreamRDF eventWriter,
                                              final BaseRuntimeChildDefinition childDefinition,
                                              final IBase element,
                                              final String childName,
                                              final BaseRuntimeElementDefinition<?> childDef,
                                              final String extensionUrl,
                                              final boolean includedResource,
                                              final CompositeChildElement parent,
                                              final EncodeContext encodeContext) {
	String childGenericName = childDefinition.getElementName();
	encodeContext.pushPath(childGenericName, false);
	try {
		// Empty values are skipped unless they represent a contained child that must still be emitted.
		if (element == null || element.isEmpty()) {
			if (!isChildContained(childDef, includedResource)) {
				return;
			}
		}
		writeCommentsPre(eventWriter, element);
		switch (childDef.getChildType()) {
			case ID_DATATYPE: {
				IIdType value = (IIdType) element;
				assert value != null;
				// The root "id" element carries only the id part; other id-typed elements carry the full value.
				String encodedValue = "id".equals(childName) ? value.getIdPart() : value.getValue();
				if (StringUtils.isNotBlank(encodedValue) || !hasNoExtensions(value)) {
					eventWriter.start();
					if (StringUtils.isNotBlank(encodedValue)) {
						eventWriter.triple(RDFUtil.triple("<value> " + encodedValue + " </value>"));
					}
					encodeExtensionsIfPresent(resource, eventWriter, element, includedResource, encodeContext);
					eventWriter.finish();
				}
				break;
			}
			case PRIMITIVE_DATATYPE: {
				IPrimitiveType<?> pd = (IPrimitiveType) element;
				assert pd != null;
				String value = pd.getValueAsString();
				// A primitive with no value may still need emitting if it carries extensions.
				if (value != null || !hasNoExtensions(pd)) {
					eventWriter.start();
					String elementId = getCompositeElementId(element);
					if (isNotBlank(elementId)) {
						eventWriter.triple(RDFUtil.triple("<id> " + elementId + " </id>"));
					}
					if (value != null) {
						eventWriter.triple(RDFUtil.triple("<value> " + value + " </value>"));
					}
					encodeExtensionsIfPresent(resource, eventWriter, element, includedResource, encodeContext);
					eventWriter.finish();
				}
				break;
			}
			case RESOURCE_BLOCK:
			case COMPOSITE_DATATYPE: {
				eventWriter.start();
				String elementId = getCompositeElementId(element);
				if (isNotBlank(elementId)) {
					eventWriter.triple(RDFUtil.triple("<id> " + elementId + " </id>"));
				}
				if (isNotBlank(extensionUrl)) {
					eventWriter.triple(RDFUtil.triple("<url> " + extensionUrl + " </url>"));
				}
				encodeCompositeElementToStreamWriter(resource, element, eventWriter, includedResource, parent, encodeContext);
				eventWriter.finish();
				break;
			}
			case CONTAINED_RESOURCE_LIST:
			case CONTAINED_RESOURCES: {
				/*
				 * Disable per #103 for (IResource next : value.getContainedResources()) { if (getContainedResources().getResourceId(next) != null) { continue; }
				 * theEventWriter.writeStartElement("contained"); encodeResourceToRDFStreamWriter(next, theEventWriter, true, fixContainedResourceId(next.getId().getValue()));
				 * theEventWriter.writeEndElement(); }
				 */
				for (IBaseResource next : getContainedResources().getContainedResources()) {
					IIdType resourceId = getContainedResources().getResourceId(next);
					eventWriter.start();
					encodeResourceToRDFStreamWriter(next, eventWriter, true, fixContainedResourceId(resourceId.getValue()), encodeContext);
					eventWriter.finish();
				}
				break;
			}
			case RESOURCE: {
				IBaseResource baseResource = (IBaseResource) element;
				String resourceName = this.context.getResourceDefinition(baseResource).getName();
				if (!super.shouldEncodeResource(resourceName)) {
					break;
				}
				eventWriter.start();
				encodeContext.pushPath(resourceName, true);
				// NOTE(review): this encodes the OUTER `resource`, not the nested `baseResource` —
				// looks like it should pass baseResource; confirm against the XML/JSON parsers.
				encodeResourceToRDFStreamWriter(resource, eventWriter, encodeContext);
				encodeContext.popPath();
				eventWriter.finish();
				break;
			}
			case EXTENSION_DECLARED:
			case UNDECL_EXT: {
				// Extensions are routed through encodeUndeclaredExtensions, never through this method.
				throw new IllegalStateException("state should not happen: " + childDef.getName());
			}
		}
		writeCommentsPost(eventWriter, element);
	} finally {
		encodeContext.popPath();
	}
}
/**
 * Resolves which resource id (if any) should be encoded for the given resource, then
 * delegates to the five-argument overload with {@code containedResource=false}.
 */
private void encodeResourceToRDFStreamWriter(final IBaseResource resource,
                                             final StreamRDF eventWriter,
                                             final EncodeContext encodeContext) {
	final IIdType idElement = resource.getIdElement();
	IIdType idToEncode = null;
	// Only a real id is a candidate; "urn:" ids are synthetic placeholders and are suppressed.
	if (StringUtils.isNotBlank(idElement.getIdPart()) && !idElement.getValue().startsWith("urn:")) {
		idToEncode = idElement;
	}
	if (!super.shouldEncodeResourceId(resource, encodeContext)) {
		// Parser configuration suppresses the id entirely.
		idToEncode = null;
	} else if (encodeContext.getResourcePath().size() == 1 && getEncodeForceResourceId() != null) {
		// At the document root a forced id, when configured, overrides the resource's own id.
		idToEncode = getEncodeForceResourceId();
	}
	encodeResourceToRDFStreamWriter(resource, eventWriter, false, idToEncode, encodeContext);
}
/**
 * Writes each non-empty extension in {@code extensions} to the RDF stream.
 * <p>
 * For every extension this emits, in order: pre-comments, a start marker, an optional
 * element-id triple, the extension URL triple, the extension value (when present),
 * any nested child extensions, a finish marker, and post-comments.
 *
 * @param resource         the resource owning the extensions (context for value encoding)
 * @param eventWriter      the RDF stream sink
 * @param extensions       extensions to encode; null entries and empty extensions are skipped
 * @param includedResource whether we are currently encoding a contained/included resource
 * @param encodeContext    tracks the current encode path
 * @throws ConfigurationException if an extension value's datatype has no known element definition
 */
private void encodeUndeclaredExtensions(final IBaseResource resource,
final StreamRDF eventWriter,
final List<? extends IBaseExtension<?, ?>> extensions,
final boolean includedResource,
final EncodeContext encodeContext) {
for (IBaseExtension<?, ?> next : extensions) {
// Skip extensions that carry neither a value nor any child extensions
if (next == null || (ElementUtil.isEmpty(next.getValue()) && next.getExtension().isEmpty())) {
continue;
}
writeCommentsPre(eventWriter, next);
eventWriter.start();
String elementId = getCompositeElementId(next);
if (isNotBlank(elementId)) {
eventWriter.triple(RDFUtil.triple("<id> " + elementId + " </id>"));
}
String url = getExtensionUrl(next.getUrl());
eventWriter.triple(RDFUtil.triple("<url> " + url + " </url>"));
if (next.getValue() != null) {
IBaseDatatype value = next.getValue();
RuntimeChildUndeclaredExtensionDefinition extDef = this.context.getRuntimeChildUndeclaredExtensionDefinition();
// Resolve the child name/definition for the value's datatype; when the datatype
// is not directly mapped, fall back to a generated extension child name.
String childName = extDef.getChildNameByDatatype(value.getClass());
BaseRuntimeElementDefinition<?> childDef;
if (childName == null) {
childDef = this.context.getElementDefinition(value.getClass());
if (childDef == null) {
throw new ConfigurationException("Unable to encode extension, unrecognized child element type: " + value.getClass().getCanonicalName());
}
childName = RuntimeChildUndeclaredExtensionDefinition.createExtensionChildName(childDef);
} else {
childDef = extDef.getChildElementDefinitionByDatatype(value.getClass());
if (childDef == null) {
throw new ConfigurationException("Unable to encode extension, unrecognized child element type: " + value.getClass().getCanonicalName());
}
}
encodeChildElementToStreamWriter(resource, eventWriter, extDef, value, childName,
childDef, null, includedResource, null, encodeContext);
}
// child extensions
encodeExtensionsIfPresent(resource, eventWriter, next, includedResource, encodeContext);
eventWriter.finish();
writeCommentsPost(eventWriter, next);
}
}
/**
 * Emits any extensions present on {@code element}, covering the extension styles used
 * across model versions: undeclared (and undeclared modifier) extensions, declared
 * extensions, and modifier extensions. Emission order is preserved.
 *
 * @param resource         the resource owning the element
 * @param writer           the RDF stream sink
 * @param element          the element whose extensions are written
 * @param includedResource whether we are inside a contained/included resource
 * @param encodeContext    tracks the current encode path
 */
private void encodeExtensionsIfPresent(final IBaseResource resource,
	final StreamRDF writer,
	final IBase element,
	final boolean includedResource,
	final EncodeContext encodeContext) {
	if (element instanceof ISupportsUndeclaredExtensions) {
		final ISupportsUndeclaredExtensions supportsUndeclared = (ISupportsUndeclaredExtensions) element;
		encodeUndeclaredExtensions(resource, writer, toBaseExtensionList(supportsUndeclared.getUndeclaredExtensions()), includedResource, encodeContext);
		encodeUndeclaredExtensions(resource, writer, toBaseExtensionList(supportsUndeclared.getUndeclaredModifierExtensions()), includedResource, encodeContext);
	}
	if (element instanceof IBaseHasExtensions) {
		encodeUndeclaredExtensions(resource, writer, ((IBaseHasExtensions) element).getExtension(), includedResource, encodeContext);
	}
	if (element instanceof IBaseHasModifierExtensions) {
		encodeUndeclaredExtensions(resource, writer, ((IBaseHasModifierExtensions) element).getModifierExtension(), includedResource, encodeContext);
	}
}
/**
 * Encodes a single declared extension value: start marker, optional element-id triple,
 * the extension URL triple, the extension's value, then the finish marker.
 *
 * @param resource          the resource being encoded
 * @param eventWriter       the RDF stream sink
 * @param containedResource whether we are inside a contained resource
 * @param nextChildElem     the composite child element being visited
 * @param nextChild         the child definition (must be a declared child definition)
 * @param nextValue         the extension value to encode
 * @param childName         resolved child element name
 * @param extensionUrl      the extension's URL
 * @param childDef          resolved element definition for the value
 * @param encodeContext     tracks the current encode path
 */
private void encodeExtension(final IBaseResource resource,
	final StreamRDF eventWriter,
	final boolean containedResource,
	final CompositeChildElement nextChildElem,
	final BaseRuntimeChildDefinition nextChild,
	final IBase nextValue,
	final String childName,
	final String extensionUrl,
	final BaseRuntimeElementDefinition<?> childDef,
	final EncodeContext encodeContext) {
	// NOTE(review): the cast result is never used; it is retained because it acts as a
	// runtime type assertion that nextChild is a declared child definition.
	BaseRuntimeDeclaredChildDefinition declaredChild = (BaseRuntimeDeclaredChildDefinition) nextChild;
	eventWriter.start();
	final String compositeElementId = getCompositeElementId(nextValue);
	if (isNotBlank(compositeElementId)) {
		eventWriter.triple(RDFUtil.triple("<id> " + compositeElementId + " </id>"));
	}
	eventWriter.triple(RDFUtil.triple("<url> " + extensionUrl + " </url>"));
	encodeChildElementToStreamWriter(resource, eventWriter, nextChild, nextValue, childName,
		childDef, null, containedResource, nextChildElem, encodeContext);
	eventWriter.finish();
}
/**
 * Encodes every child element of {@code element} to the RDF stream, handling the
 * special cases for extension URL children, narrative generation, contained
 * resources, declared extensions, and extension children.
 *
 * @param resource          the resource being encoded (owner of {@code element})
 * @param element           the composite element whose children are written
 * @param streamRDF         the RDF stream sink
 * @param containedResource whether we are currently inside a contained resource
 * @param parent            the parent composite child element, or null at the root
 * @param encodeContext     tracks the current encode path
 */
private void encodeCompositeElementToStreamWriter(final IBaseResource resource,
	final IBase element,
	final StreamRDF streamRDF,
	final boolean containedResource,
	final CompositeChildElement parent,
	final EncodeContext encodeContext) {
	for (CompositeChildElement nextChildElem : super.compositeChildIterator(element, containedResource, parent, encodeContext)) {
		BaseRuntimeChildDefinition nextChild = nextChildElem.getDef();
		if (nextChild.getElementName().equals("url") && element instanceof IBaseExtension) {
			/*
			 * RDF encoding is a one-off for extensions. The URL element goes in an attribute
			 * instead of being encoded as a normal element, only for RDF encoding
			 */
			continue;
		}
		if (nextChild instanceof RuntimeChildNarrativeDefinition) {
			INarrativeGenerator gen = this.context.getNarrativeGenerator();
			INarrative narr;
			if (resource instanceof IResource) {
				narr = ((IResource) resource).getText();
			} else if (resource instanceof IDomainResource) {
				narr = ((IDomainResource) resource).getText();
			} else {
				narr = null;
			}
			// FIX: the original asserted narr != null and then dereferenced it
			// unconditionally, failing (AssertionError with -ea, NPE otherwise) for
			// resources that are neither IResource nor IDomainResource. A missing
			// narrative is now treated the same as an empty one.
			if (gen != null && narr != null && narr.isEmpty()) {
				gen.populateResourceNarrative(this.context, resource);
			}
			if (narr != null && !narr.isEmpty()) {
				RuntimeChildNarrativeDefinition child = (RuntimeChildNarrativeDefinition) nextChild;
				String childName = nextChild.getChildNameByDatatype(child.getDatatype());
				BaseRuntimeElementDefinition<?> type = child.getChildByName(childName);
				encodeChildElementToStreamWriter(resource,
					streamRDF, nextChild, narr, childName, type, null,
					containedResource, nextChildElem, encodeContext);
				continue;
			}
		}
		if (nextChild instanceof RuntimeChildContainedResources) {
			// Contained resources are written via the dedicated child writer; no value is passed
			encodeChildElementToStreamWriter(resource, streamRDF, nextChild, null,
				nextChild.getChildNameByDatatype(null),
				nextChild.getChildElementDefinitionByDatatype(null), null,
				containedResource, nextChildElem, encodeContext);
		} else {
			List<? extends IBase> values = nextChild.getAccessor().getValues(element);
			values = super.preProcessValues(nextChild, resource, values, nextChildElem, encodeContext);
			if (values == null || values.isEmpty()) {
				continue;
			}
			for (IBase nextValue : values) {
				if ((nextValue == null || nextValue.isEmpty())) {
					continue;
				}
				ChildNameAndDef childNameAndDef = super.getChildNameAndDef(nextChild, nextValue);
				if (childNameAndDef == null) {
					continue;
				}
				String childName = childNameAndDef.getChildName();
				BaseRuntimeElementDefinition<?> childDef = childNameAndDef.getChildDef();
				String extensionUrl = getExtensionUrl(nextChild.getExtensionUrl());
				if (extensionUrl != null && !childName.equals("extension")) {
					// Declared extension with its own URL: wrap in the extension encoder
					encodeExtension(resource, streamRDF, containedResource, nextChildElem, nextChild,
						nextValue, childName, extensionUrl, childDef, encodeContext);
				} else if (nextChild instanceof RuntimeChildExtension) {
					IBaseExtension<?, ?> extension = (IBaseExtension<?, ?>) nextValue;
					// Skip extensions that carry neither a value nor child extensions
					if ((extension.getValue() == null || extension.getValue().isEmpty())) {
						if (extension.getExtension().isEmpty()) {
							continue;
						}
					}
					encodeChildElementToStreamWriter(resource, streamRDF, nextChild, nextValue,
						childName, childDef, getExtensionUrl(extension.getUrl()),
						containedResource, nextChildElem, encodeContext);
				} else if (!(nextChild instanceof RuntimeChildNarrativeDefinition) || !containedResource) {
					encodeChildElementToStreamWriter(resource, streamRDF, nextChild, nextValue,
						childName, childDef, extensionUrl, containedResource, nextChildElem, encodeContext);
				}
			}
		}
	}
}
/**
 * Copies {@code theList} into a new list typed as the common {@link IBaseExtension}
 * supertype, so extension lists from different model versions can be handled uniformly.
 *
 * @param theList the source extension list (not modified)
 * @return a new mutable list containing the same elements in the same order
 */
private <Q extends IBaseExtension<?, ?>> List<IBaseExtension<?, ?>> toBaseExtensionList(final List<Q> theList) {
	return new ArrayList<>(theList);
}
/**
 * Creates the pre-resource parser state for {@code resourceType} and hands off to the
 * RDF parsing loop.
 *
 * @param resourceType the concrete resource class to instantiate
 * @param streamReader the RDF stream to read from
 * @return the parsed resource (see {@link #doRDFLoop} for current limitations)
 */
private <T extends IBaseResource> T parseResource(Class<T> resourceType, StreamRDF streamReader) {
ParserState<T> parserState = ParserState.getPreResourceInstance(this, resourceType, context, false, getErrorHandler());
return doRDFLoop(streamReader, parserState);
}
/**
 * Drives the RDF parsing loop for the given parser state.
 * <p>
 * NOTE(review): this appears to be a stub — {@code streamReader} is never consumed;
 * the method only logs and returns the state's object. Confirm whether RDF parsing
 * is intentionally unimplemented at this stage of development.
 *
 * @param streamReader the RDF stream (currently unused)
 * @param parserState  the parser state whose object is returned
 * @return the object held by {@code parserState}
 */
private <T> T doRDFLoop(StreamRDF streamReader, ParserState<T> parserState) {
logger.trace("Entering RDF parsing loop with state: {}", parserState);
return parserState.getObject();
}
}

View File

@ -43,6 +43,7 @@ public class Constants {
*/ */
public static final Set<String> CORS_ALLWED_METHODS; public static final Set<String> CORS_ALLWED_METHODS;
public static final String CT_FHIR_JSON = "application/json+fhir"; public static final String CT_FHIR_JSON = "application/json+fhir";
public static final String CT_RDF_TURTLE = "application/x-turtle";
/** /**
* The FHIR MimeType for JSON encoding in FHIR DSTU3+ * The FHIR MimeType for JSON encoding in FHIR DSTU3+
*/ */
@ -71,6 +72,9 @@ public class Constants {
public static final String FORMAT_HTML = "html"; public static final String FORMAT_HTML = "html";
public static final String FORMAT_JSON = "json"; public static final String FORMAT_JSON = "json";
public static final String FORMAT_XML = "xml"; public static final String FORMAT_XML = "xml";
public static final String FORMAT_TURTLE = "text/turtle";
/** /**
* "text/html" and "html" * "text/html" and "html"
*/ */

View File

@ -42,16 +42,25 @@ public enum EncodingEnum {
public IParser newParser(FhirContext theContext) { public IParser newParser(FhirContext theContext) {
return theContext.newXmlParser(); return theContext.newXmlParser();
} }
},
RDF(Constants.CT_RDF_TURTLE, Constants.CT_RDF_TURTLE, Constants.FORMAT_TURTLE) {
@Override
public IParser newParser(FhirContext theContext) {
return theContext.newRDFParser();
}
}; };
/** /**
* "json" * "json"
*/ */
public static final String JSON_PLAIN_STRING = "json"; public static final String JSON_PLAIN_STRING = "json";
/** /**
* "xml" * "xml"
*/ */
public static final String XML_PLAIN_STRING = "xml"; public static final String XML_PLAIN_STRING = "xml";
private static Map<String, EncodingEnum> ourContentTypeToEncoding; private static Map<String, EncodingEnum> ourContentTypeToEncoding;
private static Map<String, EncodingEnum> ourContentTypeToEncodingLegacy; private static Map<String, EncodingEnum> ourContentTypeToEncodingLegacy;
private static Map<String, EncodingEnum> ourContentTypeToEncodingStrict; private static Map<String, EncodingEnum> ourContentTypeToEncodingStrict;
@ -127,9 +136,9 @@ public enum EncodingEnum {
return myResourceContentTypeNonLegacy; return myResourceContentTypeNonLegacy;
} }
public abstract IParser newParser(FhirContext theContext); public abstract IParser newParser(final FhirContext theContext);
public static EncodingEnum detectEncoding(String theBody) { public static EncodingEnum detectEncoding(final String theBody) {
EncodingEnum retVal = detectEncodingNoDefault(theBody); EncodingEnum retVal = detectEncodingNoDefault(theBody);
retVal = ObjectUtils.defaultIfNull(retVal, EncodingEnum.XML); retVal = ObjectUtils.defaultIfNull(retVal, EncodingEnum.XML);
return retVal; return retVal;
@ -158,7 +167,7 @@ public enum EncodingEnum {
* even if the "+fhir" part is missing from the expected content type. * even if the "+fhir" part is missing from the expected content type.
* </p> * </p>
*/ */
public static EncodingEnum forContentType(String theContentType) { public static EncodingEnum forContentType(final String theContentType) {
String contentTypeSplitted = getTypeWithoutCharset(theContentType); String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) { if (contentTypeSplitted == null) {
return null; return null;
@ -177,7 +186,7 @@ public enum EncodingEnum {
* *
* @see #forContentType(String) * @see #forContentType(String)
*/ */
public static EncodingEnum forContentTypeStrict(String theContentType) { public static EncodingEnum forContentTypeStrict(final String theContentType) {
String contentTypeSplitted = getTypeWithoutCharset(theContentType); String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) { if (contentTypeSplitted == null) {
return null; return null;
@ -186,7 +195,7 @@ public enum EncodingEnum {
} }
} }
private static String getTypeWithoutCharset(String theContentType) { private static String getTypeWithoutCharset(final String theContentType) {
if (theContentType == null) { if (theContentType == null) {
return null; return null;
} else { } else {
@ -198,7 +207,7 @@ public enum EncodingEnum {
/** /**
* Is the given type a FHIR legacy (pre-DSTU3) content type? * Is the given type a FHIR legacy (pre-DSTU3) content type?
*/ */
public static boolean isLegacy(String theContentType) { public static boolean isLegacy(final String theContentType) {
String contentTypeSplitted = getTypeWithoutCharset(theContentType); String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) { if (contentTypeSplitted == null) {
return false; return false;

View File

@ -165,6 +165,13 @@ public class ParametersUtil {
} }
@SuppressWarnings("unchecked")
public static void addParameterToParametersCode(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) {
IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("code").newInstance();
value.setValue(theValue);
addParameterToParameters(theCtx, theParameters, theName, value);
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static void addParameterToParametersInteger(FhirContext theCtx, IBaseParameters theParameters, String theName, int theValue) { public static void addParameterToParametersInteger(FhirContext theCtx, IBaseParameters theParameters, String theName, int theValue) {
IPrimitiveType<Integer> count = (IPrimitiveType<Integer>) theCtx.getElementDefinition("integer").newInstance(); IPrimitiveType<Integer> count = (IPrimitiveType<Integer>) theCtx.getElementDefinition("integer").newInstance();
@ -184,6 +191,13 @@ public class ParametersUtil {
IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("string").newInstance(); IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("string").newInstance();
value.setValue(theValue); value.setValue(theValue);
addParameterToParameters(theCtx, theParameters, theName, value); addParameterToParameters(theCtx, theParameters, theName, value);
}
@SuppressWarnings("unchecked")
public static void addParameterToParametersUri(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) {
IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("uri").newInstance();
value.setValue(theValue);
addParameterToParameters(theCtx, theParameters, theName, value);
} }

View File

@ -221,7 +221,7 @@ public class StopWatch {
* *
* @see #formatThroughput(int, TimeUnit) * @see #formatThroughput(int, TimeUnit)
*/ */
public double getThroughput(int theNumOperations, TimeUnit theUnit) { public double getThroughput(long theNumOperations, TimeUnit theUnit) {
if (theNumOperations <= 0) { if (theNumOperations <= 0) {
return 0.0f; return 0.0f;
} }
@ -229,10 +229,9 @@ public class StopWatch {
long millisElapsed = Math.max(1, getMillis()); long millisElapsed = Math.max(1, getMillis());
long periodMillis = theUnit.toMillis(1); long periodMillis = theUnit.toMillis(1);
double numerator = theNumOperations;
double denominator = ((double) millisElapsed) / ((double) periodMillis); double denominator = ((double) millisElapsed) / ((double) periodMillis);
return numerator / denominator; return (double) theNumOperations / denominator;
} }
public void restart() { public void restart() {

View File

@ -63,15 +63,21 @@ public class ValidateUtil {
} }
} }
public static void isNotNullOrThrowUnprocessableEntity(Object theObject, String theMessage, Object... theValues) {
if (theObject == null) {
throw new UnprocessableEntityException(String.format(theMessage, theValues));
}
}
public static void isNotTooLongOrThrowIllegalArgument(String theString, int theMaxLength, String theMessage) { public static void isNotTooLongOrThrowIllegalArgument(String theString, int theMaxLength, String theMessage) {
if (length(theString) > theMaxLength) { if (length(theString) > theMaxLength) {
throw new IllegalArgumentException(theMessage); throw new IllegalArgumentException(theMessage);
} }
} }
public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage) { public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage, Object... theValues) {
if (theSuccess == false) { if (theSuccess == false) {
throw new InvalidRequestException(theMessage); throw new InvalidRequestException(String.format(theMessage, theValues));
} }
} }

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.util.rdf;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.commons.io.output.WriterOutputStream;
import org.apache.jena.graph.Triple;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.system.StreamRDF;
import org.apache.jena.riot.system.StreamRDFWriter;
import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
/**
 * Utility methods for creating Jena streaming RDF readers/writers and for parsing
 * single triples from Turtle text.
 */
public class RDFUtil {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RDFUtil.class);
// NOTE(review): sized for 1448 entries but never populated, so the published map is
// always empty — confirm whether population of valid entity names was omitted.
private static final Map<String, Integer> VALID_ENTITY_NAMES;
static {
HashMap<String, Integer> validEntityNames = new HashMap<>(1448);
VALID_ENTITY_NAMES = Collections.unmodifiableMap(validEntityNames);
}
/**
 * Creates a streaming RDF writer that serializes to {@code writer} in the given
 * language, bridging Writer to OutputStream via the JVM default charset.
 */
public static StreamRDF createRDFWriter(final Writer writer, final Lang lang) {
WriterOutputStream wos = new WriterOutputStream(writer, Charset.defaultCharset());
return StreamRDFWriter.getWriterStream(wos, lang);
}
/**
 * NOTE(review): this looks broken — {@code ris} is created but never used, and a
 * <em>writer</em> stream is requested with a null OutputStream, which will likely
 * fail at runtime. A reader should presumably be built via Jena's RIOT parser
 * facilities instead — confirm intended behavior before relying on this method.
 */
public static StreamRDF createRDFReader(final Reader reader, final Lang lang) {
ReaderInputStream ris = new ReaderInputStream(reader, Charset.defaultCharset());
return StreamRDFWriter.getWriterStream(null, lang);
}
/**
 * Parses a single triple expressed as Turtle (resolved against base {@code urn:x-base:})
 * and returns it. Fails if the string contains no statements.
 */
public static Triple triple(String tripleAsTurtle) {
Model m = ModelFactory.createDefaultModel();
m.read(new StringReader(tripleAsTurtle), "urn:x-base:", "TURTLE");
return m.listStatements().next().asTriple();
}
}

View File

@ -0,0 +1,82 @@
package ca.uhn.fhir.util.rdf;
import java.util.HashSet;
import java.util.LinkedList;
import org.apache.jena.graph.Triple;
import org.apache.jena.riot.system.StreamRDF;
import org.apache.jena.sparql.core.Quad;
/**
* Wraps another {@link StreamRDF} and attempts to remove duplicate
* triples and quads. To maintain streaming, duplicates are only
* removed within a sliding window of configurable size. Default
* size is 10000 triples and quads.
*/
public class StreamRDFDedup implements StreamRDF {

	private final StreamRDF delegate;
	private final int windowSize;

	// Fast membership test over the current window, plus a FIFO of the same
	// tuples that fixes the eviction order.
	private final HashSet<Object> window;
	private final LinkedList<Object> evictionQueue = new LinkedList<Object>();

	public StreamRDFDedup(StreamRDF wrapped) {
		this(wrapped, 10000);
	}

	public StreamRDFDedup(StreamRDF wrapped, int windowSize) {
		this.delegate = wrapped;
		this.windowSize = windowSize;
		// Initial capacity big enough to avoid rehashing
		this.window = new HashSet<Object>(windowSize * 3 / 2);
	}

	@Override
	public void start() {
		delegate.start();
	}

	@Override
	public void triple(Triple triple) {
		if (seen(triple)) {
			return;
		}
		delegate.triple(triple);
	}

	@Override
	public void quad(Quad quad) {
		if (seen(quad)) {
			return;
		}
		delegate.quad(quad);
	}

	@Override
	public void base(String base) {
		delegate.base(base);
	}

	@Override
	public void prefix(String prefix, String iri) {
		delegate.prefix(prefix, iri);
	}

	@Override
	public void finish() {
		delegate.finish();
	}

	/**
	 * Records {@code tuple} in the window and reports whether it was already present.
	 * When the window grows past its configured size, the oldest entry is evicted.
	 */
	private boolean seen(Object tuple) {
		if (!window.add(tuple)) {
			return true;
		}
		evictionQueue.add(tuple);
		if (evictionQueue.size() > windowSize) {
			window.remove(evictionQueue.removeFirst());
		}
		return false;
	}
}

View File

@ -0,0 +1,90 @@
package ca.uhn.fhir.util.rdf;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.Map.Entry;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.Triple;
import org.apache.jena.riot.system.RiotLib;
import org.apache.jena.riot.system.StreamOps;
import org.apache.jena.riot.system.StreamRDF;
import org.apache.jena.riot.writer.WriterStreamRDFBlocks;
import org.apache.jena.riot.writer.WriterStreamRDFPlain;
import org.apache.jena.shared.PrefixMapping;
import org.apache.jena.shared.impl.PrefixMappingImpl;
import org.apache.jena.vocabulary.RDF;
/**
* Writes an iterator over triples to N-Triples or Turtle
* in a streaming fashion, that is, without needing to hold
* the entire thing in memory.
* <p>
* Instances are single-use.
* <p>
* There doesn't seem to be a pre-packaged version of this
* functionality in Jena/ARQ that doesn't require a Graph or Model.
*/
public class StreamingRDFWriter {
// Destination stream; not closed by this class.
private final OutputStream out;
// Single-use triple source; consumed exactly once by a write method.
private final Iterator<Triple> triples;
// Sliding-window size for duplicate suppression; values <= 0 disable dedup.
private int dedupWindowSize = 10000;
/**
 * @param out     destination for the serialized output
 * @param triples the triples to write; consumed exactly once
 */
public StreamingRDFWriter(OutputStream out, Iterator<Triple> triples) {
this.out = out;
this.triples = triples;
}
/**
 * Sets the sliding-window size used for duplicate removal; zero or negative disables it.
 */
public void setDedupWindowSize(int newSize) {
this.dedupWindowSize = newSize;
}
/**
 * Streams all triples to the output in N-Triples format, deduplicating within the
 * configured sliding window when enabled.
 */
public void writeNTriples() {
StreamRDF writer = new WriterStreamRDFPlain(new IndentedWriter(out));
if (dedupWindowSize > 0) {
writer = new StreamRDFDedup(writer, dedupWindowSize);
}
writer.start();
StreamOps.sendTriplesToStream(triples, writer);
writer.finish();
}
/**
 * Streams all triples to the output as Turtle.
 *
 * @param baseIRI   base IRI announced to the writer (and written out when requested)
 * @param prefixes  prefix mappings to emit; an rdf: prefix is added when unambiguous
 * @param writeBase whether to explicitly write the base directive
 */
public void writeTurtle(String baseIRI, PrefixMapping prefixes, boolean writeBase) {
// Auto-register RDF prefix so that rdf:type is displayed well
// All other prefixes come from the query and should be as author intended
prefixes = ensureRDFPrefix(prefixes);
if (writeBase) {
// Jena's streaming Turtle writers don't output base even if it is provided,
// so we write it directly.
IndentedWriter w = new IndentedWriter(out);
RiotLib.writeBase(w, baseIRI);
w.flush();
}
StreamRDF writer = new WriterStreamRDFBlocks(out);
if (dedupWindowSize > 0) {
writer = new StreamRDFDedup(writer, dedupWindowSize);
}
writer.start();
writer.base(baseIRI);
for (Entry<String, String> e : prefixes.getNsPrefixMap().entrySet()) {
writer.prefix(e.getKey(), e.getValue());
}
StreamOps.sendTriplesToStream(triples, writer);
writer.finish();
}
/**
 * Returns a prefix mapping guaranteed to contain some prefix for the RDF namespace,
 * registering "rdf" only when it is not already bound to another namespace.
 */
private PrefixMapping ensureRDFPrefix(PrefixMapping prefixes) {
// Some prefix already registered for the RDF namespace -- good enough
if (prefixes.getNsURIPrefix(RDF.getURI()) != null) return prefixes;
// rdf: is registered to something else -- give up
if (prefixes.getNsPrefixURI("rdf") != null) return prefixes;
// Register rdf:
PrefixMapping newPrefixes = new PrefixMappingImpl();
newPrefixes.setNsPrefixes(prefixes);
newPrefixes.setNsPrefix("rdf", RDF.getURI());
return newPrefixes;
}
}

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.util;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.junit.Test; import org.junit.Test;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@ -69,4 +70,18 @@ public class ValidateUtilTest {
} }
} }
@Test
public void testIsNotNull() {
ValidateUtil.isNotNullOrThrowUnprocessableEntity("aa", "");
try {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(null, "The message %s", "123");
fail();
} catch (UnprocessableEntityException e) {
assertEquals("The message 123", e.getMessage());
}
}
} }

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.util.ParametersUtil;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
@ -56,6 +57,7 @@ public class UploadTerminologyCommand extends BaseCommand {
addBaseUrlOption(options); addBaseUrlOption(options);
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")"); addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)"); addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)");
addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format");
addBasicAuthOption(options); addBasicAuthOption(options);
addVerboseLoggingOption(options); addVerboseLoggingOption(options);
@ -78,23 +80,13 @@ public class UploadTerminologyCommand extends BaseCommand {
} }
IGenericClient client = super.newClient(theCommandLine); IGenericClient client = super.newClient(theCommandLine);
IBaseParameters inputParameters; IBaseParameters inputParameters = ParametersUtil.newInstance(myFhirCtx);
if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) { ParametersUtil.addParameterToParametersUri(myFhirCtx, inputParameters, "url", termUrl);
org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
for (String next : datafile) { for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next)); ParametersUtil.addParameterToParametersString(myFhirCtx, inputParameters, "localfile", next);
} }
inputParameters = p; if (theCommandLine.hasOption("custom")) {
} else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) { ParametersUtil.addParameterToParametersCode(myFhirCtx, inputParameters, "contentMode", "custom");
org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
}
inputParameters = p;
} else {
throw new ParseException("This command does not support FHIR version " + ctx.getVersion().getVersion());
} }
if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) { if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) {

View File

@ -220,7 +220,7 @@ implements IRestfulServer<JaxRsRequest>, IResourceProvider {
* @see <a href="https://www.hl7.org/fhir/http.html#read">https://www.hl7.org/fhir/http.html#read</a> * @see <a href="https://www.hl7.org/fhir/http.html#read">https://www.hl7.org/fhir/http.html#read</a>
*/ */
@GET @GET
@Path("/{id}") @Path("/{id : ((?!_history).)*}")
public Response find(@PathParam("id") final String id) public Response find(@PathParam("id") final String id)
throws IOException { throws IOException {
return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.READ).id(id)); return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.READ).id(id));
@ -245,7 +245,7 @@ implements IRestfulServer<JaxRsRequest>, IResourceProvider {
} }
/** /**
* Retrieve the update history for a particular resource * Retrieve a version of a resource
* *
* @param id the id of the resource * @param id the id of the resource
* @param version the version of the resource * @param version the version of the resource
@ -254,12 +254,41 @@ implements IRestfulServer<JaxRsRequest>, IResourceProvider {
*/ */
@GET @GET
@Path("/{id}/_history/{version}") @Path("/{id}/_history/{version}")
public Response findHistory(@PathParam("id") final String id, @PathParam("version") final String version) public Response findVersion(@PathParam("id") final String id, @PathParam("version") final String version)
throws IOException { throws IOException {
final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.VREAD).id(id).version(version); final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.VREAD).id(id).version(version);
return execute(theRequest); return execute(theRequest);
} }
/**
* Retrieve the update history for a particular resource
*
* @param id the id of the resource
* @return the response
* @see <a href="https://www.hl7.org/fhir/http.html#history">https://www.hl7.org/fhir/http.html#history</a>
*/
@GET
@Path("/{id}/_history")
public Response historyForInstance(@PathParam("id") final String id)
throws IOException {
final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.HISTORY_INSTANCE).id(id);
return execute(theRequest);
}
/**
* Retrieve the update history for a particular type
*
* @return the response
* @see <a href="https://www.hl7.org/fhir/http.html#history">https://www.hl7.org/fhir/http.html#history</a>
*/
@GET
@Path("/_history")
public Response historyForType()
throws IOException {
final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.HISTORY_TYPE);
return execute(theRequest);
}
/** /**
* Compartment Based Access * Compartment Based Access
* *
@ -270,7 +299,7 @@ implements IRestfulServer<JaxRsRequest>, IResourceProvider {
* @see <a href="https://www.hl7.org/fhir/compartments.html#compartment">https://www.hl7.org/fhir/compartments.html#compartment</a> * @see <a href="https://www.hl7.org/fhir/compartments.html#compartment">https://www.hl7.org/fhir/compartments.html#compartment</a>
*/ */
@GET @GET
@Path("/{id}/{compartment}") @Path("/{id}/{compartment : ((?!_history).)*}")
public Response findCompartment(@PathParam("id") final String id, @PathParam("compartment") final String compartment) public Response findCompartment(@PathParam("id") final String id, @PathParam("compartment") final String compartment)
throws IOException { throws IOException {
final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE).id(id).compartment( final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE).id(id).compartment(

View File

@ -22,6 +22,7 @@ import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.param.StringAndListParam; import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -36,6 +37,7 @@ import org.mockito.Matchers;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.List; import java.util.List;
import static org.junit.Assert.*; import static org.junit.Assert.*;
@ -83,6 +85,12 @@ public class AbstractJaxRsResourceProviderTest {
return theResource; return theResource;
} }
private Patient createPatient(long id, String version) {
Patient theResource = new Patient();
theResource.setId(new IdDt(id).withVersion(version));
return theResource;
}
private List<Patient> createPatients(int firstId, int lastId) { private List<Patient> createPatients(int firstId, int lastId) {
List<Patient> result = new ArrayList<Patient>(lastId - firstId); List<Patient> result = new ArrayList<Patient>(lastId - firstId);
for (long i = firstId; i <= lastId; i++) { for (long i = firstId; i <= lastId; i++) {
@ -341,7 +349,7 @@ public class AbstractJaxRsResourceProviderTest {
@Test @Test
public void testVRead() { public void testVRead() {
when(mock.findHistory(idCaptor.capture())).thenReturn(createPatient(1)); when(mock.findVersion(idCaptor.capture())).thenReturn(createPatient(1));
final Patient patient = client.vread(Patient.class, "1", "2"); final Patient patient = client.vread(Patient.class, "1", "2");
compareResultId(1, patient); compareResultId(1, patient);
compareResultUrl("/Patient/1", patient); compareResultUrl("/Patient/1", patient);
@ -349,6 +357,26 @@ public class AbstractJaxRsResourceProviderTest {
assertEquals("2", idCaptor.getValue().getVersionIdPart()); assertEquals("2", idCaptor.getValue().getVersionIdPart());
} }
@Test
public void testInstanceHistory() {
when(mock.getHistoryForInstance(idCaptor.capture())).thenReturn(new SimpleBundleProvider(Collections.singletonList(createPatient(1, "1"))));
final Bundle bundle = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
Patient patient = (Patient) bundle.getEntryFirstRep().getResource();
compareResultId(1, patient);
compareResultUrl("/Patient/1/_history/1", patient);
assertEquals("1", idCaptor.getValue().getIdPart());
assertNull(idCaptor.getValue().getVersionIdPart());
}
@Test
public void testTypeHistory() {
when(mock.getHistoryForType()).thenReturn(new SimpleBundleProvider(Collections.singletonList(createPatient(1, "1"))));
final Bundle bundle = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
Patient patient = (Patient) bundle.getEntryFirstRep().getResource();
compareResultId(1, patient);
compareResultUrl("/Patient/1/_history/1", patient);
}
@Test @Test
public void testXFindUnknownPatient() { public void testXFindUnknownPatient() {
try { try {

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jaxrs.server.test; package ca.uhn.fhir.jaxrs.server.test;
import java.util.Collections;
import java.util.Date;
import java.util.List; import java.util.List;
import javax.ejb.Stateless; import javax.ejb.Stateless;
@ -8,6 +10,11 @@ import javax.ws.rs.*;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import org.hl7.fhir.instance.model.api.IIdType;
import org.mockito.Mockito; import org.mockito.Mockito;
import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider; import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider;
@ -68,8 +75,18 @@ public class TestJaxRsMockPatientRestProvider extends AbstractJaxRsResourceProvi
} }
@Read(version = true) @Read(version = true)
public Patient findHistory(@IdParam final IdDt theId) { public Patient findVersion(@IdParam final IdDt theId) {
return mock.findHistory(theId); return mock.findVersion(theId);
}
@History
public IBundleProvider getHistoryForInstance(@IdParam IIdType theId) {
return mock.getHistoryForInstance(theId);
}
@History
public IBundleProvider getHistoryForType() {
return mock.getHistoryForType();
} }
@Create @Create

View File

@ -19,6 +19,9 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.*; import ca.uhn.fhir.rest.server.*;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@ -89,7 +92,7 @@ public class JaxRsPatientRestProvider extends AbstractJaxRsResourceProvider<Pati
} }
@Read(version = true) @Read(version = true)
public Patient findHistory(@IdParam final IdDt theId) { public Patient findVersion(@IdParam final IdDt theId) {
if (patients.containsKey(theId.getIdPart())) { if (patients.containsKey(theId.getIdPart())) {
final List<Patient> list = patients.get(theId.getIdPart()); final List<Patient> list = patients.get(theId.getIdPart());
for (final Patient patient : list) { for (final Patient patient : list) {
@ -101,6 +104,17 @@ public class JaxRsPatientRestProvider extends AbstractJaxRsResourceProvider<Pati
throw new ResourceNotFoundException(theId); throw new ResourceNotFoundException(theId);
} }
// from BaseJpaResourceProvider
@History
public IBundleProvider getHistoryForInstance(@IdParam IdDt theId, @Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@History
public IBundleProvider getHistoryForType(@Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@Operation(name = "firstVersion", idempotent = true, returnParameters = { @OperationParam(name = "return", type = StringDt.class) }) @Operation(name = "firstVersion", idempotent = true, returnParameters = { @OperationParam(name = "return", type = StringDt.class) })
public Parameters firstVersion(@IdParam final IdDt theId, @OperationParam(name = "dummy") StringDt dummyInput) { public Parameters firstVersion(@IdParam final IdDt theId, @OperationParam(name = "dummy") StringDt dummyInput) {
Parameters parameters = new Parameters(); Parameters parameters = new Parameters();

View File

@ -10,6 +10,9 @@ import javax.ws.rs.*;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
@ -90,7 +93,7 @@ public class JaxRsPatientRestProviderDstu3 extends AbstractJaxRsResourceProvider
} }
@Read(version = true) @Read(version = true)
public Patient findHistory(@IdParam final IdType theId) { public Patient findVersion(@IdParam final IdType theId) {
if (patients.containsKey(theId.getIdPart())) { if (patients.containsKey(theId.getIdPart())) {
final List<Patient> list = patients.get(theId.getIdPart()); final List<Patient> list = patients.get(theId.getIdPart());
for (final Patient patient : list) { for (final Patient patient : list) {
@ -102,6 +105,16 @@ public class JaxRsPatientRestProviderDstu3 extends AbstractJaxRsResourceProvider
throw new ResourceNotFoundException(theId); throw new ResourceNotFoundException(theId);
} }
@History
public IBundleProvider getHistoryForInstance(@IdParam IdType theId, @Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@History
public IBundleProvider getHistoryForType(@Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@Operation(name = "firstVersion", idempotent = true, returnParameters = { @OperationParam(name = "return", type = StringType.class) }) @Operation(name = "firstVersion", idempotent = true, returnParameters = { @OperationParam(name = "return", type = StringType.class) })
public Parameters firstVersion(@IdParam final IdType theId, @OperationParam(name = "dummy") StringType dummyInput) { public Parameters firstVersion(@IdParam final IdType theId, @OperationParam(name = "dummy") StringType dummyInput) {
Parameters parameters = new Parameters(); Parameters parameters = new Parameters();

View File

@ -0,0 +1,241 @@
package ca.uhn.fhir.jaxrs.server.example;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.api.AddProfileTagEnum;
import ca.uhn.fhir.context.api.BundleInclusionRule;
import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.ETagSupportEnum;
import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import javax.ejb.Local;
import javax.ejb.Stateless;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* A demo JaxRs Patient Rest Provider
*/
@Local
@Path(JaxRsPatientRestProviderR4.PATH)
@Stateless
@Produces({ MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML })
public class JaxRsPatientRestProviderR4 extends AbstractJaxRsResourceProvider<Patient> {
// Monotonically increasing id source for stored patients.
// NOTE(review): boxed Long with a non-atomic ++ (see createPatient); not
// thread-safe even though the patient map below is a ConcurrentHashMap —
// confirm whether concurrent access is possible in the demo setup.
private static Long counter = 1L;
/**
* The HAPI paging provider for this server
*/
public static final IPagingProvider PAGE_PROVIDER;
// JAX-RS base path for this resource provider.
static final String PATH = "/Patient";
// In-memory store: patient id -> all stored versions (oldest first).
private static final ConcurrentHashMap<String, List<Patient>> patients = new ConcurrentHashMap<String, List<Patient>>();
static {
PAGE_PROVIDER = new FifoMemoryPagingProvider(10);
}
static {
// Seed the in-memory store. Each put relies on Java's left-to-right
// argument evaluation: String.valueOf(counter) reads the current counter
// BEFORE createPatient(...) assigns that same value as the resource id and
// then increments the counter, so key and resource id stay in sync.
patients.put(String.valueOf(counter), createPatient("Van Houte"));
patients.put(String.valueOf(counter), createPatient("Agnew"));
// 20 more patients, keys continue from the counter ("3" onward).
for (int i = 0; i < 20; i++) {
patients.put(String.valueOf(counter), createPatient("Random Patient " + counter));
}
}
/**
 * Constructs the R4 demo provider.
 *
 * BUG FIX: the original passed {@code FhirContext.forDstu3()} to the super
 * constructor even though this provider serves R4 resources
 * (org.hl7.fhir.r4.model.*), which makes the server parse/encode with the
 * wrong FHIR version. An R4 provider must be built on an R4 context.
 */
public JaxRsPatientRestProviderR4() {
	super(FhirContext.forR4(), JaxRsPatientRestProviderR4.class);
}
@Create
// Creates a new patient version list keyed by the current counter.
// NOTE(review): theConditional is accepted but ignored — conditional creates
// are not actually honored by this demo implementation; confirm intent.
public MethodOutcome create(@ResourceParam final Patient patient, @ConditionalUrlParam String theConditional) throws Exception {
// Relies on left-to-right argument evaluation: "" + counter is computed
// before createPatient(patient) assigns that id and bumps the counter.
patients.put("" + counter, createPatient(patient));
final MethodOutcome result = new MethodOutcome().setCreated(true);
result.setResource(patient);
// patient.getId() was populated by createPatient(patient) above.
result.setId(new IdType(patient.getId()));
return result;
}
@Delete
// Removes all stored versions of the patient; find(theId) throws
// ResourceNotFoundException (HTTP 404) if the id is unknown.
public MethodOutcome delete(@IdParam final IdType theId) {
final Patient deletedPatient = find(theId);
patients.remove(deletedPatient.getIdElement().getIdPart());
// NOTE(review): setCreated(true) on a delete outcome looks copy-pasted from
// create(); confirm whether the "created" flag is intended here.
final MethodOutcome result = new MethodOutcome().setCreated(true);
result.setResource(deletedPatient);
return result;
}
@Read
public Patient find(@IdParam final IdType theId) {
	// Return the most recent stored version, or signal 404 semantics when the
	// id is unknown. ConcurrentHashMap never maps a key to null, so a null
	// lookup result is equivalent to the key being absent.
	final List<Patient> versions = patients.get(theId.getIdPart());
	if (versions == null) {
		throw new ResourceNotFoundException(theId);
	}
	return getLast(versions);
}
@Read(version = true)
public Patient findVersion(@IdParam final IdType theId) {
	// Version-aware read: return the stored version whose version id matches
	// the one in the request; any miss (unknown id or unknown version) ends in
	// ResourceNotFoundException.
	final List<Patient> versions = patients.get(theId.getIdPart());
	if (versions != null) {
		for (final Patient candidate : versions) {
			final Long storedVersion = candidate.getIdElement().getVersionIdPartAsLong();
			if (storedVersion.equals(theId.getVersionIdPartAsLong())) {
				return candidate;
			}
		}
	}
	throw new ResourceNotFoundException(theId);
}
@History
// Instance-level _history stub: always returns an empty bundle whose id is
// "myTestId" so tests can verify the history plumbing end to end.
public IBundleProvider getHistoryForInstance(@IdParam IdType theId, @Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@History
// Type-level _history stub, same canned response as the instance variant.
public IBundleProvider getHistoryForType(@Since Date theSince, @At DateRangeParam theAt, RequestDetails theRequestDetails) {
return new SimpleBundleProvider(Collections.emptyList(), "myTestId");
}
@Operation(name = "firstVersion", idempotent = true, returnParameters = { @OperationParam(name = "return", type = StringType.class) })
// Custom $firstVersion operation: echoes the dummy input back alongside the
// looked-up patient.
public Parameters firstVersion(@IdParam final IdType theId, @OperationParam(name = "dummy") StringType dummyInput) {
Parameters parameters = new Parameters();
// NOTE(review): version "0" is passed but find() ignores the version part and
// returns the latest version — confirm whether a findVersion() call was meant.
Patient patient = find(new IdType(theId.getResourceType(), theId.getIdPart(), "0"));
// Response value embeds the last assigned id (counter - 1) and the dummy
// input; throws NPE if the "dummy" parameter is omitted by the caller.
parameters.addParameter().setName("return").setResource(patient).setValue(new StringType((counter - 1) + "" + "inputVariable [ " + dummyInput.getValue() + "]"));
return parameters;
}
// Never add profile tags to returned resources.
@Override
public AddProfileTagEnum getAddProfileTag() {
return AddProfileTagEnum.NEVER;
}
// Bundle _include handling follows the request's _include parameters.
@Override
public BundleInclusionRule getBundleInclusionRule() {
return BundleInclusionRule.BASED_ON_INCLUDES;
}
// ETag / If-None-Match support is switched off for this demo provider.
@Override
public ETagSupportEnum getETagSupport() {
return ETagSupportEnum.DISABLED;
}
/** THE DEFAULTS */
// No server interceptors are registered.
@Override
public List<IServerInterceptor> getInterceptors_() {
return Collections.emptyList();
}
// Latest version of a patient = last element of its version list.
// Throws IndexOutOfBoundsException on an empty list.
private Patient getLast(final List<Patient> list) {
return list.get(list.size() - 1);
}
// Shared FIFO paging provider (capacity 10, see static initializer).
@Override
public IPagingProvider getPagingProvider() {
return PAGE_PROVIDER;
}
@Override
public Class<Patient> getResourceType() {
return Patient.class;
}
// Responses are pretty-printed unless the client requests otherwise.
@Override
public boolean isDefaultPrettyPrint() {
return true;
}
// JAX-RS bridge: exposes the $firstVersion extended operation over HTTP GET.
@GET
@Path("/{id}/$firstVersion")
public Response operationFirstVersionUsingGet(@PathParam("id") String id) throws IOException {
return customOperation(null, RequestTypeEnum.GET, id, "$firstVersion", RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE);
}
// JAX-RS bridge: same operation over HTTP POST with a request body.
// NOTE(review): this POST overload is misleadingly named "...UsingGet";
// JAX-RS dispatches on annotations so behavior is unaffected, but consider
// renaming for clarity.
@POST
@Path("/{id}/$firstVersion")
public Response operationFirstVersionUsingGet(@PathParam("id") String id, final String resource) throws Exception {
return customOperation(resource, RequestTypeEnum.POST, id, "$firstVersion", RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE);
}
@Search
public List<Patient> search(@RequiredParam(name = Patient.SP_NAME) final StringParam name) {
	// For every stored patient, return the most recent version whose first
	// family name matches the query; a null name parameter matches everything
	// (in which case the last version of each patient is returned).
	final List<Patient> matches = new LinkedList<Patient>();
	for (final List<Patient> versions : patients.values()) {
		Patient newestMatch = null;
		for (final Patient candidate : versions) {
			final boolean nameMatches = name == null
				|| candidate.getName().get(0).getFamilyElement().getValueNotNull().equals(name.getValueNotNull());
			if (nameMatches) {
				// Keep iterating so the LAST (newest) matching version wins.
				newestMatch = candidate;
			}
		}
		if (newestMatch != null) {
			matches.add(newestMatch);
		}
	}
	return matches;
}
@Search(compartmentName = "Condition")
public List<IBaseResource> searchCompartment(@IdParam IdType thePatientId) {
	// Compartment-search stub: regardless of the patient id, answer with a
	// single canned Condition (fixed id 665577) so clients get a non-empty
	// compartment result to exercise.
	final Condition cannedCondition = new Condition();
	cannedCondition.setId(new IdType("665577"));
	final List<IBaseResource> results = new ArrayList<IBaseResource>();
	results.add(cannedCondition);
	return results;
}
@Update
// Appends a new version of an existing patient; 404s when the id is unknown.
public MethodOutcome update(@IdParam final IdType theId, @ResourceParam final Patient patient) {
final String idPart = theId.getIdPart();
if (patients.containsKey(idPart)) {
final List<Patient> patientList = patients.get(idPart);
// Next version id = last stored version + 1; must be computed BEFORE the
// new patient is appended to the list.
final Patient lastPatient = getLast(patientList);
patient.setId(createId(theId.getIdPartAsLong(), lastPatient.getIdElement().getVersionIdPartAsLong() + 1));
patientList.add(patient);
final MethodOutcome result = new MethodOutcome().setCreated(false);
result.setResource(patient);
result.setId(new IdType(patient.getId()));
return result;
} else {
throw new ResourceNotFoundException(theId);
}
}
// Builds a fully-qualified Patient id of the form Patient/<id>/_history/<version>.
private static IdType createId(final Long id, final Long theVersionId) {
return new IdType("Patient", "" + id, "" + theVersionId);
}
// Wraps a patient as a fresh single-element version list with version 1.
// SIDE EFFECT: assigns the current counter as the patient's id and then
// increments the counter (non-atomic — see the field declaration).
private static List<Patient> createPatient(final Patient patient) {
patient.setId(createId(counter, 1L));
final LinkedList<Patient> list = new LinkedList<Patient>();
list.add(patient);
counter++;
return list;
}
// Convenience overload: builds a new Patient with the given family name.
private static List<Patient> createPatient(final String name) {
final Patient patient = new Patient();
patient.getName().add(new HumanName().setFamily(name));
return createPatient(patient);
}
}

View File

@ -5,6 +5,7 @@ import static org.junit.Assert.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import ca.uhn.fhir.model.primitive.IdDt;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletContextHandler;
@ -285,4 +286,16 @@ public class JaxRsPatientProviderDstu3Test {
System.out.println(patient); System.out.println(patient);
} }
@Test
public void testInstanceHistory() {
final Bundle history = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getIdElement().getIdPart());
}
@Test
public void testTypeHistory() {
final Bundle history = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getIdElement().getIdPart());
}
} }

View File

@ -0,0 +1,308 @@
package ca.uhn.fhir.jaxrs.server.example;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jaxrs.client.JaxRsRestfulClientFactory;
import ca.uhn.fhir.model.primitive.BoundCodeDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.SearchStyleEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.r4.model.*;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
public class JaxRsPatientProviderR4Test {
private static IGenericClient client;
private static FhirContext ourCtx = FhirContext.forR4();
private static final String PATIENT_NAME = "Van Houte";
private static int ourPort;
private static Server jettyServer;
@AfterClass
// Stops the embedded Jetty server and resets HAPI's static state so later
// test classes start from a clean slate.
public static void afterClassClearContext() throws Exception {
JettyUtil.closeServer(jettyServer);
TestUtil.clearAllStaticFieldsForUnitTest();
}
/**
 * Boots an embedded Jetty + RESTEasy server hosting the example providers and
 * builds an R4 generic client pointed at it.
 *
 * BUG FIX: this R4 test previously registered {@code JaxRsPatientRestProvider}
 * (the non-R4 example provider) while the client below is created from an R4
 * {@code FhirContext}; the provider under test, {@code JaxRsPatientRestProviderR4},
 * is registered instead.
 * NOTE(review): JaxRsConformanceProvider / JaxRsPageProvider are left as-is;
 * confirm whether R4-specific variants exist and should be registered here.
 */
@BeforeClass
public static void setUpClass()
	throws Exception {
	ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
	context.setContextPath("/");
	// Port 0 = pick any free port; the real port is read back after startup.
	jettyServer = new Server(0);
	jettyServer.setHandler(context);
	ServletHolder jerseyServlet = context.addServlet(org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher.class, "/*");
	jerseyServlet.setInitOrder(0);
	//@formatter:off
	jerseyServlet.setInitParameter("resteasy.resources",
		StringUtils.join(Arrays.asList(
			JaxRsConformanceProvider.class.getCanonicalName(),
			JaxRsPatientRestProviderR4.class.getCanonicalName(),
			JaxRsPageProvider.class.getCanonicalName()
		), ","));
	//@formatter:on
	JettyUtil.startServer(jettyServer);
	ourPort = JettyUtil.getPortForStartedServer(jettyServer);
	ourCtx.setRestfulClientFactory(new JaxRsRestfulClientFactory(ourCtx));
	// Skip server metadata validation and allow slow responses in CI.
	ourCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
	ourCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000);
	client = ourCtx.newRestfulGenericClient("http://localhost:" + ourPort + "/");
	client.setEncoding(EncodingEnum.JSON);
	client.registerInterceptor(new LoggingInterceptor(true));
}
/** Search/Query - Type */
@Test
public void findUsingGenericClientBySearch() {
	// Perform a search by exact patient name.
	final Bundle results = client
		.search()
		.forResource(Patient.class)
		.where(Patient.NAME.matchesExactly().value(PATIENT_NAME))
		.returnBundle(Bundle.class)
		.execute();
	System.out.println(results.getEntry().get(0));
	// BUG FIX: JUnit's assertEquals takes (expected, actual); the original had
	// the arguments reversed, which produces a misleading failure message.
	assertEquals(1, results.getEntry().size());
}
/** Search - Multi-valued Parameters (ANY/OR) */
// NOTE(review): this test only prints the first entry and asserts nothing; it
// effectively verifies just that the request does not throw.
@Test
public void findUsingGenericClientBySearchWithMultiValues() {
final Bundle response = client
.search()
.forResource(Patient.class)
.where(Patient.ADDRESS.matches().values("Toronto")).and(Patient.ADDRESS.matches().values("Ontario"))
.and(Patient.ADDRESS.matches().values("Canada"))
.where(Patient.IDENTIFIER.exactly().systemAndIdentifier("SHORTNAME", "TOYS"))
.returnBundle(Bundle.class)
.execute();
System.out.println(response.getEntry().get(0));
}
/** Search - Paging */
// Fetches a first page of at most 8 results and, when the server advertises a
// next link, follows it once. No assertions — smoke test only.
@Test
public void findWithPaging() {
// Perform a search
final Bundle results = client.search().forResource(Patient.class).limitTo(8).returnBundle(Bundle.class).execute();
System.out.println(results.getEntry().size());
if (results.getLink(Bundle.LINK_NEXT) != null) {
// load next page
final Bundle nextPage = client.loadPage().next(results).execute();
System.out.println(nextPage.getEntry().size());
}
}
/** */
// Verifies that searching via HTTP POST (_search) returns at least one entry.
@Test
public void testSearchPost() {
Bundle response = client.search()
.forResource("Patient")
.usingStyle(SearchStyleEnum.POST)
.returnBundle(Bundle.class)
.execute();
assertTrue(response.getEntry().size() > 0);
}
/** Search - Compartments */
// Exercises GET Patient/1/Condition; the provider's compartment-search stub
// always returns one Condition, so a non-empty bundle is expected.
@Test
public void testSearchCompartements() {
Bundle response = client.search()
.forResource(Patient.class)
.withIdAndCompartment("1", "Condition")
.returnBundle(Bundle.class)
.execute();
assertTrue(response.getEntry().size() > 0);
}
/** Search - Subsetting (_summary and _elements) */
// Disabled placeholder: performs a plain search with no subsetting parameters
// and no assertions.
@Test
@Ignore
public void testSummary() {
client.search()
.forResource(Patient.class)
.returnBundle(Bundle.class)
.execute();
}
// Creates a patient (Prefer: return=representation) and verifies it can be
// read back by the id the server assigned.
@Test
public void testCreatePatient() {
	final Patient existing = new Patient();
	existing.setId((IdDt) null);
	existing.getNameFirstRep().setFamily("Created Patient 54");
	client.setEncoding(EncodingEnum.JSON);
	final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
	System.out.println(results.getId());
	final Patient patient = (Patient) results.getResource();
	System.out.println(patient);
	// BUG FIX: the original asserted on the un-executed fluent read BUILDER,
	// which is always non-null, making the assertion vacuous. Executing the
	// read makes the test actually fetch the created resource.
	assertNotNull(client.read().resource(Patient.class).withId(patient.getId()).execute());
	client.setEncoding(EncodingEnum.JSON);
}
/** Conditional Creates */
// Creates a patient, then issues a conditional create keyed on its first
// identifier. NOTE(review): no assertions, and the created demo patient has no
// identifiers, so the If-None-Exist criterion is built from an empty
// Identifier's toString() — confirm this exercises what was intended.
@Test
public void testConditionalCreate() {
final Patient existing = new Patient();
existing.setId((IdDt) null);
existing.getNameFirstRep().setFamily("Created Patient 54");
client.setEncoding(EncodingEnum.XML);
final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
System.out.println(results.getId());
final Patient patient = (Patient) results.getResource();
client.create()
.resource(patient)
.conditional()
.where(Patient.IDENTIFIER.exactly().identifier(patient.getIdentifierFirstRep().toString()))
.execute();
}
/** Find By Id */
// Reads Patient/1 and checks the returned resource carries id 1.
@Test
public void findUsingGenericClientById() {
	final Patient results = client.read().resource(Patient.class).withId("1").execute();
	// BUG FIX: JUnit's assertEquals takes (expected, actual); the original had
	// the arguments reversed, which produces a misleading failure message.
	assertEquals(1L, results.getIdElement().getIdPartAsLong().longValue());
}
// Reads Patient/1, adds a name suffix, and PUTs it back.
// NOTE(review): the MethodOutcome is never inspected — the test only checks
// that the update round-trip does not throw.
@Test
public void testUpdateById() {
final Patient existing = client.read().resource(Patient.class).withId("1").execute();
final List<HumanName> name = existing.getName();
name.get(0).addSuffix("The Second");
existing.setName(name);
client.setEncoding(EncodingEnum.XML);
final MethodOutcome results = client.update().resource(existing).withId("1").execute();
}
// Creates a patient, deletes it, and expects the follow-up read to fail.
// NOTE(review): the catch block accepts ANY exception and the 404 status check
// is commented out — confirm whether tightening this was deferred on purpose.
@Test
public void testDeletePatient() {
final Patient existing = new Patient();
existing.getNameFirstRep().setFamily("Created Patient XYZ");
final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
System.out.println(results.getId());
final Patient patient = (Patient) results.getResource();
client.delete().resource(patient).execute();
try {
client.read().resource(Patient.class).withId(patient.getId()).execute();
fail();
} catch (final Exception e) {
// assertEquals(e.getStatusCode(), Constants.STATUS_HTTP_404_NOT_FOUND);
}
}
/** Transaction - Server */
// Disabled: posts a one-entry transaction bundle. NOTE(review): the
// theTransactionOperation variable is built (with a raw BoundCodeDt) but never
// attached to the entry, and the response is not asserted.
@Ignore
@Test
public void testTransaction() {
Bundle bundle = new Bundle();
Bundle.BundleEntryComponent entry = bundle.addEntry();
final Patient existing = new Patient();
existing.getNameFirstRep().setFamily("Created with bundle");
entry.setResource(existing);
BoundCodeDt<BundleEntryTransactionMethodEnum> theTransactionOperation = new BoundCodeDt(
BundleEntryTransactionMethodEnum.VALUESET_BINDER,
BundleEntryTransactionMethodEnum.POST);
Bundle response = client.transaction().withBundle(bundle).execute();
}
/** Conformance - Server */
// Disabled: fetches the CapabilityStatement and checks the first REST resource
// type is Patient. NOTE(review): assertEquals arguments are in
// (actual, expected) order here.
@Test
@Ignore
public void testConformance() {
final CapabilityStatement caps = client.capabilities().ofType(CapabilityStatement.class).execute();
System.out.println(caps.getRest().get(0).getResource().get(0).getType());
assertEquals(caps.getRest().get(0).getResource().get(0).getType().toString(), "Patient");
}
/** Extended Operations */
// Create a client to talk to the HeathIntersections server
// Invokes the custom $firstVersion operation via POST and checks the dummy
// input parameter is echoed back in the response.
@Test
public void testExtendedOperations() {
	client.registerInterceptor(new LoggingInterceptor(true));
	// Create the input parameters to pass to the server
	Parameters inParams = new Parameters();
	inParams.addParameter().setName("start").setValue(new DateTimeType("2001-01-01"));
	inParams.addParameter().setName("end").setValue(new DateTimeType("2015-03-01"));
	inParams.addParameter().setName("dummy").setValue(new StringType("myAwesomeDummyValue"));
	// Invoke $everything on "Patient/1"
	Parameters outParams = client
		.operation()
		.onInstance(new IdDt("Patient", "1"))
		.named("$firstVersion")
		.withParameters(inParams)
		// .useHttpGet() // Use HTTP GET instead of POST
		.execute();
	String resultValue = outParams.getParameter().get(0).getValue().toString();
	System.out.println(resultValue);
	// BUG FIX: the original used assertEquals(message, actualBoolean, true),
	// i.e. reversed expected/actual with a boolean; assertTrue states the
	// intent directly and reports the message on failure.
	assertTrue("expected but found : " + resultValue, resultValue.contains("myAwesomeDummyValue"));
}
// Same as testExtendedOperations, but invokes $firstVersion via HTTP GET.
@Test
public void testExtendedOperationsUsingGet() {
	// Create the input parameters to pass to the server
	Parameters inParams = new Parameters();
	inParams.addParameter().setName("start").setValue(new DateTimeType("2001-01-01"));
	inParams.addParameter().setName("end").setValue(new DateTimeType("2015-03-01"));
	inParams.addParameter().setName("dummy").setValue(new StringType("myAwesomeDummyValue"));
	// Invoke $everything on "Patient/1"
	Parameters outParams = client
		.operation()
		.onInstance(new IdDt("Patient", "1"))
		.named("$firstVersion")
		.withParameters(inParams)
		.useHttpGet() // Use HTTP GET instead of POST
		.execute();
	String resultValue = outParams.getParameter().get(0).getValue().toString();
	System.out.println(resultValue);
	// BUG FIX: the original used assertEquals(message, actualBoolean, true),
	// i.e. reversed expected/actual with a boolean; assertTrue states the
	// intent directly and reports the message on failure.
	assertTrue("expected but found : " + resultValue, resultValue.contains("myAwesomeDummyValue"));
}
// Version-aware read of Patient/1/_history/1. Prints only — no assertions.
@Test
public void testVRead() {
final Patient patient = client.read().resource(Patient.class).withIdAndVersion("1", "1").execute();
System.out.println(patient);
}
// Plain read of Patient/1. Prints only — no assertions.
@Test
public void testRead() {
final Patient patient = client.read().resource(Patient.class).withId("1").execute();
System.out.println(patient);
}
// Instance-level _history: the provider's stub returns an empty bundle whose
// id is "myTestId", which is what this asserts.
@Test
public void testInstanceHistory() {
final Bundle history = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getIdElement().getIdPart());
}
// Type-level _history: same canned bundle id expected.
@Test
public void testTypeHistory() {
final Bundle history = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getIdElement().getIdPart());
}
}

View File

@ -291,4 +291,15 @@ public class JaxRsPatientProviderTest {
System.out.println(patient); System.out.println(patient);
} }
@Test
public void testInstanceHistory() {
final Bundle history = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getId().getIdPart());
}
@Test
public void testTypeHistory() {
final Bundle history = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
assertEquals("myTestId", history.getId().getIdPart());
}
} }

View File

@ -50,15 +50,12 @@ import static org.apache.commons.lang3.StringUtils.length;
@Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED") @Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED")
}) })
public class TermConcept implements Serializable { public class TermConcept implements Serializable {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
private static final long serialVersionUID = 1L;
public static final int MAX_CODE_LENGTH = 500; public static final int MAX_CODE_LENGTH = 500;
public static final int MAX_DESC_LENGTH = 400; public static final int MAX_DESC_LENGTH = 400;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
private static final long serialVersionUID = 1L;
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {}) @OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {})
private Collection<TermConceptParentChildLink> myChildren; private List<TermConceptParentChildLink> myChildren;
@Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH) @Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH)
@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),}) @Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
@ -178,7 +175,7 @@ public class TermConcept implements Serializable {
return b.isEquals(); return b.isEquals();
} }
public Collection<TermConceptParentChildLink> getChildren() { public List<TermConceptParentChildLink> getChildren() {
if (myChildren == null) { if (myChildren == null) {
myChildren = new ArrayList<>(); myChildren = new ArrayList<>();
} }

View File

@ -177,6 +177,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
public IBaseParameters uploadExternalCodeSystem( public IBaseParameters uploadExternalCodeSystem(
HttpServletRequest theServletRequest, HttpServletRequest theServletRequest,
@OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl, @OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl,
@OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType<String> theContentMode,
@OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theLocalFile, @OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theLocalFile,
@OperationParam(name = "package", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage, @OperationParam(name = "package", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage,
RequestDetails theRequestDetails RequestDetails theRequestDetails
@ -245,11 +246,15 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
} }
} }
String url = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null; String codeSystemUrl = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null;
url = defaultString(url); codeSystemUrl = defaultString(codeSystemUrl);
String contentMode = theContentMode != null ? theContentMode.getValue() : null;
UploadStatistics stats; UploadStatistics stats;
switch (url) { if ("custom".equals(contentMode)) {
stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
} else {
switch (codeSystemUrl) {
case IHapiTerminologyLoaderSvc.SCT_URI: case IHapiTerminologyLoaderSvc.SCT_URI:
stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails); stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
break; break;
@ -260,7 +265,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails); stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
break; break;
default: default:
throw new InvalidRequestException("Unknown URL: " + url); throw new InvalidRequestException("Unknown URL: " + codeSystemUrl);
}
} }
IBaseParameters retVal = ParametersUtil.newInstance(myCtx); IBaseParameters retVal = ParametersUtil.newInstance(myCtx);

View File

@ -39,6 +39,9 @@ public interface IHapiTerminologyLoaderSvc {
UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails); UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
// FIXME: remove the default implementation before 4.0.0
default UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
interface FileDescriptor { interface FileDescriptor {
String getFilename(); String getFilename();

View File

@ -4,15 +4,19 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.term.custom.ConceptHandler;
import ca.uhn.fhir.jpa.term.custom.HierarchyHandler;
import ca.uhn.fhir.jpa.term.loinc.*; import ca.uhn.fhir.jpa.term.loinc.*;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
import ca.uhn.fhir.jpa.util.Counter; import ca.uhn.fhir.jpa.util.Counter;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVFormat;
@ -83,15 +87,19 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
public static final String LOINC_GROUP_FILE = "Group.csv"; public static final String LOINC_GROUP_FILE = "Group.csv";
public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv"; public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv"; public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";
public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv";
public static final String CUSTOM_HIERARCHY_FILE = "hierarchy.csv";
public static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json";
public static final String CUSTOM_CODESYSTEM_XML = "codesystem.xml";
private static final int LOG_INCREMENT = 1000; private static final int LOG_INCREMENT = 1000;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
@Autowired @Autowired
private IHapiTerminologySvc myTermSvc; private IHapiTerminologySvc myTermSvc;
@Autowired(required = false)
private IHapiTerminologySvcDstu3 myTermSvcDstu3; // FYI: Hardcoded to R4 because that's what the term svc uses internally
@Autowired(required = false) private final FhirContext myCtx = FhirContext.forR4();
private IHapiTerminologySvcR4 myTermSvcR4;
private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) { private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {
@ -165,7 +173,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
int nextLoggedCount = 0; int nextLoggedCount = 0;
while (iter.hasNext()) { while (iter.hasNext()) {
CSVRecord nextRecord = iter.next(); CSVRecord nextRecord = iter.next();
if (nextRecord.isConsistent()==false) { if (nextRecord.isConsistent() == false) {
continue; continue;
} }
theHandler.accept(nextRecord); theHandler.accept(nextRecord);
@ -203,8 +211,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
ourLog.info("Beginning IMGTHLA processing"); ourLog.info("Beginning IMGTHLA processing");
return processImgthlaFiles(descriptors, theRequestDetails); return processImgthlaFiles(descriptors, theRequestDetails);
} } finally {
finally {
IOUtils.closeQuietly(descriptors); IOUtils.closeQuietly(descriptors);
} }
} }
@ -257,6 +264,67 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} }
} }
@Override
public UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
final Map<String, TermConcept> code2concept = new HashMap<>();
IRecordHandler handler;
Optional<String> codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML);
CodeSystem codeSystem;
if (codeSystemContent.isPresent()) {
codeSystem = EncodingEnum
.detectEncoding(codeSystemContent.get())
.newParser(myCtx)
.parseResource(CodeSystem.class, codeSystemContent.get());
ValidateUtil.isTrueOrThrowInvalidRequest(theSystem.equalsIgnoreCase(codeSystem.getUrl()), "CodeSystem.url does not match the supplied system: %s", theSystem);
ValidateUtil.isTrueOrThrowInvalidRequest(CodeSystem.CodeSystemContentMode.NOTPRESENT.equals(codeSystem.getContent()), "CodeSystem.content does not match the expected value: %s", CodeSystem.CodeSystemContentMode.NOTPRESENT.toCode());
} else {
codeSystem = new CodeSystem();
codeSystem.setUrl(theSystem);
codeSystem.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
}
TermCodeSystemVersion csv = new TermCodeSystemVersion();
// Concept File
handler = new ConceptHandler(code2concept, csv);
iterateOverZipFile(descriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Hierarchy
if (descriptors.hasFile(CUSTOM_HIERARCHY_FILE)) {
handler = new HierarchyHandler(code2concept);
iterateOverZipFile(descriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
}
// Add root concepts to CodeSystemVersion
for (TermConcept nextConcept : code2concept.values()) {
if (nextConcept.getParents().isEmpty()) {
csv.getConcepts().add(nextConcept);
}
}
IIdType target = storeCodeSystem(theRequestDetails, csv, codeSystem, null, null);
return new UploadStatistics(code2concept.size(), target);
}
}
private Optional<String> loadFile(LoadedFileDescriptors theDescriptors, String... theFilenames) {
for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
for (String nextFilename : theFilenames) {
if (next.getFilename().endsWith(nextFilename)) {
try {
String contents = IOUtils.toString(next.getInputStream(), Charsets.UTF_8);
return Optional.of(contents);
} catch (IOException e) {
throw new InternalErrorException(e);
}
}
}
}
return Optional.empty();
}
UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
final Map<String, TermConcept> code2concept = new HashMap<>(); final Map<String, TermConcept> code2concept = new HashMap<>();
@ -285,13 +353,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) { for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename(); String nextFilename = nextZipBytes.getFilename();
if(!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT) if (!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)
&& !IMGTHLA_HLA_XML.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) { && !IMGTHLA_HLA_XML.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) {
ourLog.info("Skipping unexpected file {}", nextFilename); ourLog.info("Skipping unexpected file {}", nextFilename);
continue; continue;
} }
if(IMGTHLA_HLA_NOM_TXT.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)) { if (IMGTHLA_HLA_NOM_TXT.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)) {
// process colon-delimited hla_nom.txt file // process colon-delimited hla_nom.txt file
ourLog.info("Processing file {}", nextFilename); ourLog.info("Processing file {}", nextFilename);
@ -309,20 +377,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} }
LineNumberReader lnr = new LineNumberReader(reader); LineNumberReader lnr = new LineNumberReader(reader);
while(lnr.readLine() != null) {} while (lnr.readLine() != null) {
}
ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber()); ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber());
} catch (IOException e) { } catch (IOException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} } finally {
finally {
IOUtils.closeQuietly(reader); IOUtils.closeQuietly(reader);
} }
foundHlaNom = true; foundHlaNom = true;
} }
if(IMGTHLA_HLA_XML.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) { if (IMGTHLA_HLA_XML.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) {
// process hla.xml file // process hla.xml file
ourLog.info("Processing file {}", nextFilename); ourLog.info("Processing file {}", nextFilename);
@ -340,13 +408,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} }
LineNumberReader lnr = new LineNumberReader(reader); LineNumberReader lnr = new LineNumberReader(reader);
while(lnr.readLine() != null) {} while (lnr.readLine() != null) {
}
ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber()); ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber());
} catch (IOException e) { } catch (IOException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} } finally {
finally {
IOUtils.closeQuietly(reader); IOUtils.closeQuietly(reader);
} }
@ -553,11 +621,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
return new UploadStatistics(code2concept.size(), target); return new UploadStatistics(code2concept.size(), target);
} }
@VisibleForTesting
void setTermSvcDstu3ForUnitTest(IHapiTerminologySvcDstu3 theTermSvcDstu3) {
myTermSvcDstu3 = theTermSvcDstu3;
}
@VisibleForTesting @VisibleForTesting
void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) { void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) {
myTermSvc = theTermSvc; myTermSvc = theTermSvc;
@ -571,38 +634,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
IIdType retVal; IIdType retVal;
myTermSvc.setProcessDeferred(false); myTermSvc.setProcessDeferred(false);
if (myTermSvcDstu3 != null) { retVal = myTermSvc.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
retVal = myTermSvcDstu3.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
} else {
retVal = myTermSvcR4.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
}
myTermSvc.setProcessDeferred(true); myTermSvc.setProcessDeferred(true);
return retVal; return retVal;
} }
public static String firstNonBlank(String... theStrings) {
String retVal = "";
for (String nextString : theStrings) {
if (isNotBlank(nextString)) {
retVal = nextString;
break;
}
}
return retVal;
}
public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
TermConcept concept = id2concept.get(id);
if (concept == null) {
concept = new TermConcept();
id2concept.put(id, concept);
concept.setCodeSystemVersion(codeSystemVersion);
}
return concept;
}
static class LoadedFileDescriptors implements Closeable { static class LoadedFileDescriptors implements Closeable {
private List<File> myTemporaryFiles = new ArrayList<>(); private List<File> myTemporaryFiles = new ArrayList<>();
@ -651,6 +688,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} }
} }
boolean hasFile(String theFilename) {
return myUncompressedFileDescriptors
.stream()
.map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename
.anyMatch(t -> t.equals(theFilename));
}
@Override @Override
public void close() { public void close() {
for (File next : myTemporaryFiles) { for (File next : myTemporaryFiles) {
@ -694,4 +738,25 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} }
public static String firstNonBlank(String... theStrings) {
String retVal = "";
for (String nextString : theStrings) {
if (isNotBlank(nextString)) {
retVal = nextString;
break;
}
}
return retVal;
}
public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
TermConcept concept = id2concept.get(id);
if (concept == null) {
concept = new TermConcept();
id2concept.put(id, concept);
concept.setCodeSystemVersion(codeSystemVersion);
}
return concept;
}
} }

View File

@ -0,0 +1,63 @@
package ca.uhn.fhir.jpa.term.custom;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;
public class ConceptHandler implements IRecordHandler {
private static final Logger ourLog = LoggerFactory.getLogger(ConceptHandler.class);
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
public ConceptHandler(Map<String, TermConcept> theCode2concept, TermCodeSystemVersion theCodeSystemVersion) {
myCode2Concept = theCode2concept;
myCodeSystemVersion = theCodeSystemVersion;
}
@Override
public void accept(CSVRecord theRecord) {
String code = trim(theRecord.get("CODE"));
if (isNotBlank(code)) {
String display = trim(theRecord.get("DISPLAY"));
Validate.isTrue(!myCode2Concept.containsKey(code), "The code %s has appeared more than once", code);
TermConcept concept = TerminologyLoaderSvcImpl.getOrCreateConcept(myCodeSystemVersion, myCode2Concept, code);
concept.setCode(code);
concept.setDisplay(display);
myCode2Concept.put(code, concept);
}
}
}

View File

@ -0,0 +1,58 @@
package ca.uhn.fhir.jpa.term.custom;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.csv.CSVRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;
public class HierarchyHandler implements IRecordHandler {
private final Map<String, TermConcept> myCode2Concept;
public HierarchyHandler(Map<String, TermConcept> theCode2concept) {
myCode2Concept = theCode2concept;
}
@Override
public void accept(CSVRecord theRecord) {
String parent = trim(theRecord.get("PARENT"));
String child = trim(theRecord.get("CHILD"));
if (isNotBlank(parent) && isNotBlank(child)) {
TermConcept parentConcept = myCode2Concept.get(parent);
ValidateUtil.isNotNullOrThrowUnprocessableEntity(parentConcept, "Parent code %s not found", parent);
TermConcept childConcept = myCode2Concept.get(child);
ValidateUtil.isNotNullOrThrowUnprocessableEntity(childConcept, "Child code %s not found", child);
parentConcept.addChild(childConcept, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
}
}
}

View File

@ -0,0 +1,58 @@
package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(MockitoJUnitRunner.class)
abstract class BaseLoaderTest {
@Mock
protected RequestDetails mySrd;
@Captor
protected ArgumentCaptor<List<ConceptMap>> myConceptMapCaptor;
@Captor
protected ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
@Captor
protected ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
@Captor
protected ArgumentCaptor<CodeSystem> mySystemCaptor;
Map<String, ConceptMap> extractConceptMaps() {
Map<String, ConceptMap> conceptMaps = new HashMap<>();
for (ConceptMap next : myConceptMapCaptor.getAllValues().get(0)) {
conceptMaps.put(next.getId(), next);
}
return conceptMaps;
}
Map<String, TermConcept> extractConcepts() {
Map<String, TermConcept> concepts = new HashMap<>();
for (TermConcept next : myCsvCaptor.getValue().getConcepts()) {
concepts.put(next.getCode(), next);
}
return concepts;
}
Map<String, ValueSet> extractValueSets() {
Map<String, ValueSet> valueSets = new HashMap<>();
for (ValueSet next : myValueSetsCaptor.getValue()) {
valueSets.put(next.getId(), next);
}
return valueSets;
}
}

View File

@ -0,0 +1,115 @@
package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.CodeSystem;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class TerminologyLoaderSvcCustomTest extends BaseLoaderTest {
private TerminologyLoaderSvcImpl mySvc;
@Mock
private IHapiTerminologySvc myTermSvc;
private ZipCollectionBuilder myFiles;
@Before
public void before() {
mySvc = new TerminologyLoaderSvcImpl();
mySvc.setTermSvcForUnitTests(myTermSvc);
myFiles = new ZipCollectionBuilder();
}
@Test
public void testLoadComplete() throws Exception {
myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CODESYSTEM_JSON);
myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_HIERARCHY_FILE);
// Actually do the load
mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
Map<String, TermConcept> concepts = extractConcepts();
// Verify codesystem
assertEquals("http://example.com/labCodes", mySystemCaptor.getValue().getUrl());
assertEquals(CodeSystem.CodeSystemContentMode.NOTPRESENT, mySystemCaptor.getValue().getContent());
assertEquals("Example Lab Codes", mySystemCaptor.getValue().getName());
// Root code
TermConcept code;
assertEquals(2, concepts.size());
code = concepts.get("CHEM");
assertEquals("CHEM", code.getCode());
assertEquals("Chemistry", code.getDisplay());
assertEquals(2, code.getChildren().size());
assertEquals("HB", code.getChildren().get(0).getChild().getCode());
assertEquals("Hemoglobin", code.getChildren().get(0).getChild().getDisplay());
assertEquals("NEUT", code.getChildren().get(1).getChild().getCode());
assertEquals("Neutrophils", code.getChildren().get(1).getChild().getDisplay());
}
@Test
public void testLoadWithNoCodeSystem() throws Exception {
myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
// Actually do the load
mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
Map<String, TermConcept> concepts = extractConcepts();
// Verify codesystem
assertEquals("http://example.com/labCodes", mySystemCaptor.getValue().getUrl());
assertEquals(CodeSystem.CodeSystemContentMode.NOTPRESENT, mySystemCaptor.getValue().getContent());
}
/**
* No hierarchy file supplied
*/
@Test
public void testLoadCodesOnly() throws Exception {
myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
// Actually do the load
mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
Map<String, TermConcept> concepts = extractConcepts();
TermConcept code;
// Root code
assertEquals(5, concepts.size());
code = concepts.get("CHEM");
assertEquals("CHEM", code.getCode());
assertEquals("Chemistry", code.getDisplay());
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
}

View File

@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.term; package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.TestUtil;
@ -8,9 +7,7 @@ import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.IOException; import java.io.IOException;
@ -18,19 +15,12 @@ import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@RunWith(MockitoJUnitRunner.class) public class TerminologyLoaderSvcImgthlaTest extends BaseLoaderTest {
public class TerminologyLoaderSvcImgthlaTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImgthlaTest.class);
private TerminologyLoaderSvcImpl mySvc; private TerminologyLoaderSvcImpl mySvc;
@Mock @Mock
private IHapiTerminologySvc myTermSvc; private IHapiTerminologySvc myTermSvc;
@Mock
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
@Mock
private RequestDetails details;
private ZipCollectionBuilder myFiles; private ZipCollectionBuilder myFiles;
@ -38,7 +28,6 @@ public class TerminologyLoaderSvcImgthlaTest {
public void before() { public void before() {
mySvc = new TerminologyLoaderSvcImpl(); mySvc = new TerminologyLoaderSvcImpl();
mySvc.setTermSvcForUnitTests(myTermSvc); mySvc.setTermSvcForUnitTests(myTermSvc);
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
myFiles = new ZipCollectionBuilder(); myFiles = new ZipCollectionBuilder();
} }
@ -49,7 +38,7 @@ public class TerminologyLoaderSvcImgthlaTest {
// Actually do the load // Actually do the load
try { try {
mySvc.loadImgthla(myFiles.getFiles(), details); mySvc.loadImgthla(myFiles.getFiles(), mySrd);
fail("Expected \"not yet fully implemented\" InternalErrorException"); fail("Expected \"not yet fully implemented\" InternalErrorException");
} catch(InternalErrorException e) { } catch(InternalErrorException e) {
// for now, expect "not yet fully implemented" exception // for now, expect "not yet fully implemented" exception
@ -65,7 +54,7 @@ public class TerminologyLoaderSvcImgthlaTest {
addImgthlaMandatoryFilesToZip(myFiles); addImgthlaMandatoryFilesToZip(myFiles);
// Actually do the load // Actually do the load
mySvc.loadImgthla(myFiles.getFiles(), details); mySvc.loadImgthla(myFiles.getFiles(), mySrd);
// TODO: verify the code system was loaded correctly (similarly to TerminologyLoaderSvcLoincTest.testLoadLoincMandatoryFilesOnly) // TODO: verify the code system was loaded correctly (similarly to TerminologyLoaderSvcLoincTest.testLoadLoincMandatoryFilesOnly)
} }
@ -76,7 +65,7 @@ public class TerminologyLoaderSvcImgthlaTest {
// Actually do the load // Actually do the load
try { try {
mySvc.loadImgthla(myFiles.getFiles(), details); mySvc.loadImgthla(myFiles.getFiles(), mySrd);
fail("Expected UnprocessableEntityException"); fail("Expected UnprocessableEntityException");
} catch (UnprocessableEntityException e) { } catch (UnprocessableEntityException e) {
assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:")); assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.term; package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.loinc.*; import ca.uhn.fhir.jpa.term.loinc.*;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
@ -15,46 +14,28 @@ import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
import org.mockito.Captor; import org.mockito.Captor;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times; import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class) public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {
public class TerminologyLoaderSvcLoincTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class);
private TerminologyLoaderSvcImpl mySvc; private TerminologyLoaderSvcImpl mySvc;
@Mock @Mock
private IHapiTerminologySvc myTermSvc; private IHapiTerminologySvc myTermSvc;
@Mock
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
@Captor
private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
@Captor @Captor
private ArgumentCaptor<CodeSystem> mySystemCaptor; private ArgumentCaptor<CodeSystem> mySystemCaptor;
@Mock
private RequestDetails details;
@Captor
private ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
@Captor
private ArgumentCaptor<List<ConceptMap>> myConceptMapCaptor;
private ZipCollectionBuilder myFiles; private ZipCollectionBuilder myFiles;
@ -62,43 +43,18 @@ public class TerminologyLoaderSvcLoincTest {
public void before() { public void before() {
mySvc = new TerminologyLoaderSvcImpl(); mySvc = new TerminologyLoaderSvcImpl();
mySvc.setTermSvcForUnitTests(myTermSvc); mySvc.setTermSvcForUnitTests(myTermSvc);
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
myFiles = new ZipCollectionBuilder(); myFiles = new ZipCollectionBuilder();
} }
private Map<String, ConceptMap> extractConceptMaps() {
Map<String, ConceptMap> conceptMaps = new HashMap<>();
for (ConceptMap next : myConceptMapCaptor.getAllValues().get(0)) {
conceptMaps.put(next.getId(), next);
}
return conceptMaps;
}
private Map<String, TermConcept> extractConcepts() {
Map<String, TermConcept> concepts = new HashMap<>();
for (TermConcept next : myCsvCaptor.getValue().getConcepts()) {
concepts.put(next.getCode(), next);
}
return concepts;
}
private Map<String, ValueSet> extractValueSets() {
Map<String, ValueSet> valueSets = new HashMap<>();
for (ValueSet next : myValueSetsCaptor.getValue()) {
valueSets.put(next.getId(), next);
}
return valueSets;
}
@Test @Test
public void testLoadLoinc() throws Exception { public void testLoadLoinc() throws Exception {
addLoincMandatoryFilesToZip(myFiles); addLoincMandatoryFilesToZip(myFiles);
// Actually do the load // Actually do the load
mySvc.loadLoinc(myFiles.getFiles(), details); mySvc.loadLoinc(myFiles.getFiles(), mySrd);
verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture()); verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
Map<String, TermConcept> concepts = extractConcepts(); Map<String, TermConcept> concepts = extractConcepts();
Map<String, ValueSet> valueSets = extractValueSets(); Map<String, ValueSet> valueSets = extractValueSets();
Map<String, ConceptMap> conceptMaps = extractConceptMaps(); Map<String, ConceptMap> conceptMaps = extractConceptMaps();
@ -347,9 +303,9 @@ public class TerminologyLoaderSvcLoincTest {
addLoincMandatoryFilesToZip(myFiles); addLoincMandatoryFilesToZip(myFiles);
// Actually do the load // Actually do the load
mySvc.loadLoinc(myFiles.getFiles(), details); mySvc.loadLoinc(myFiles.getFiles(), mySrd);
verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture()); verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
Map<String, TermConcept> concepts = extractConcepts(); Map<String, TermConcept> concepts = extractConcepts();
Map<String, ValueSet> valueSets = extractValueSets(); Map<String, ValueSet> valueSets = extractValueSets();
Map<String, ConceptMap> conceptMaps = extractConceptMaps(); Map<String, ConceptMap> conceptMaps = extractConceptMaps();
@ -371,7 +327,7 @@ public class TerminologyLoaderSvcLoincTest {
// Actually do the load // Actually do the load
try { try {
mySvc.loadLoinc(myFiles.getFiles(), details); mySvc.loadLoinc(myFiles.getFiles(), mySrd);
fail(); fail();
} catch (UnprocessableEntityException e) { } catch (UnprocessableEntityException e) {
assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:")); assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));

View File

@ -4,39 +4,36 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
import org.mockito.Captor; import org.mockito.Captor;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.util.*; import java.util.ArrayList;
import java.util.TreeSet;
import java.util.zip.ZipOutputStream; import java.util.zip.ZipOutputStream;
import static org.hamcrest.Matchers.*; import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.*; import static org.hamcrest.Matchers.hasItem;
import static org.mockito.Matchers.any; import static org.hamcrest.Matchers.not;
import static org.mockito.Matchers.anyListOf; import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock; import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class) public class TerminologyLoaderSvcSnomedCtTest extends BaseLoaderTest {
public class TerminologyLoaderSvcSnomedCtTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcSnomedCtTest.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcSnomedCtTest.class);
private TerminologyLoaderSvcImpl mySvc; private TerminologyLoaderSvcImpl mySvc;
@ -44,15 +41,12 @@ public class TerminologyLoaderSvcSnomedCtTest {
private IHapiTerminologySvc myTermSvc; private IHapiTerminologySvc myTermSvc;
@Captor @Captor
private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor; private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
@Mock
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
private ZipCollectionBuilder myFiles; private ZipCollectionBuilder myFiles;
@Before @Before
public void before() { public void before() {
mySvc = new TerminologyLoaderSvcImpl(); mySvc = new TerminologyLoaderSvcImpl();
mySvc.setTermSvcForUnitTests(myTermSvc); mySvc.setTermSvcForUnitTests(myTermSvc);
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
myFiles = new ZipCollectionBuilder(); myFiles = new ZipCollectionBuilder();
} }
@ -84,17 +78,16 @@ public class TerminologyLoaderSvcSnomedCtTest {
myFiles.addFileZip("/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt"); myFiles.addFileZip("/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
myFiles.addFileZip("/sct/", "sct2_TextDefinition_Full-en_INT_20160131.txt"); myFiles.addFileZip("/sct/", "sct2_TextDefinition_Full-en_INT_20160131.txt");
RequestDetails details = mock(RequestDetails.class); mySvc.loadSnomedCt(myFiles.getFiles(), mySrd);
mySvc.loadSnomedCt(myFiles.getFiles(), details);
verify(myTermSvcDstu3).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyListOf(ValueSet.class), anyListOf(ConceptMap.class)); verify(myTermSvc).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyList(), anyListOf(ConceptMap.class));
TermCodeSystemVersion csv = myCsvCaptor.getValue(); TermCodeSystemVersion csv = myCsvCaptor.getValue();
TreeSet<String> allCodes = toCodes(csv, true); TreeSet<String> allCodes = toCodes(csv, true);
ourLog.info(allCodes.toString()); ourLog.info(allCodes.toString());
assertThat(allCodes, containsInRelativeOrder("116680003")); assertThat(allCodes, hasItem("116680003"));
assertThat(allCodes, not(containsInRelativeOrder("207527008"))); assertThat(allCodes, not(hasItem("207527008")));
allCodes = toCodes(csv, false); allCodes = toCodes(csv, false);
ourLog.info(allCodes.toString()); ourLog.info(allCodes.toString());
@ -110,8 +103,7 @@ public class TerminologyLoaderSvcSnomedCtTest {
public void testLoadSnomedCtAgainstRealFile() throws Exception { public void testLoadSnomedCtAgainstRealFile() throws Exception {
byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip")); byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));
RequestDetails details = mock(RequestDetails.class); mySvc.loadSnomedCt(list(bytes), mySrd);
mySvc.loadSnomedCt(list(bytes), details);
} }
@Test @Test
@ -123,9 +115,8 @@ public class TerminologyLoaderSvcSnomedCtTest {
ourLog.info("ZIP file has {} bytes", bos.toByteArray().length); ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
RequestDetails details = mock(RequestDetails.class);
try { try {
mySvc.loadSnomedCt(list(bos.toByteArray()), details); mySvc.loadSnomedCt(list(bos.toByteArray()), mySrd);
fail(); fail();
} catch (UnprocessableEntityException e) { } catch (UnprocessableEntityException e) {
assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input: ")); assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input: "));

View File

@ -0,0 +1,10 @@
{
"resourceType": "CodeSystem",
"url": "http://example.com/labCodes",
"name": "Example Lab Codes",
"description": "A set of lab codes",
"status": "active",
"publisher": "Example Organization Corporation Worldwide",
"date": "2019-07-30",
"content": "not-present"
}

View File

@ -0,0 +1,8 @@
CODE,DISPLAY
CHEM,Chemistry
HB,Hemoglobin
NEUT,Neutrophils
MICRO,Microbiology
C&S,Culture and Sensitivity
1 CODE DISPLAY
2 CHEM Chemistry
3 HB Hemoglobin
4 NEUT Neutrophils
5 MICRO Microbiology
6 C&S Culture and Sensitivity

View File

@ -0,0 +1,6 @@
PARENT,CHILD
CHEM,HB
CHEM,NEUT
MICRO,C&S
1 PARENT CHILD
2 CHEM HB
3 CHEM NEUT
4 MICRO C&S

View File

@ -94,6 +94,10 @@ public class AnyListResource {
return (org.hl7.fhir.r4.model.ListResource) get(); return (org.hl7.fhir.r4.model.ListResource) get();
} }
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
}
public void addCode(String theSystem, String theCode) { public void addCode(String theSystem, String theCode) {
switch (myFhirVersion) { switch (myFhirVersion) {
case DSTU3: case DSTU3:

View File

@ -186,6 +186,12 @@
</dependency> </dependency>
<!-- UNIT TEST DEPENDENCIES --> <!-- UNIT TEST DEPENDENCIES -->
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>net.sf.json-lib</groupId> <groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId> <artifactId>json-lib</artifactId>

View File

@ -0,0 +1,281 @@
package ca.uhn.fhir.parser;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.r4.model.*;
import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.*;
/**
 * Round-trip tests for the RDF (Turtle) parser against the R4 model classes.
 *
 * <p>NOTE(review): several of these tests were adapted from the JSON parser test
 * suite and still assert on JSON-style quoting in the serialized output (for
 * example {@code "\"Medication\""}). Confirm the expected token shapes once the
 * RDF parser output stabilizes — the whole class is {@code @Ignore}d until then.</p>
 */
@Ignore
public class RDFParserR4Test {

	private static final Logger ourLog = LoggerFactory.getLogger(RDFParserR4Test.class);
	private static FhirContext ourCtx = FhirContext.forR4();

	/**
	 * Reference paths registered via {@code setDontStripVersionsFromReferencesAtPaths}
	 * must keep their {@code _history} version suffix when encoded.
	 */
	@Test
	public void testDontStripVersions() {
		FhirContext ctx = FhirContext.forR4();
		ctx.getParserOptions().setDontStripVersionsFromReferencesAtPaths("QuestionnaireResponse.questionnaire");

		QuestionnaireResponse qr = new QuestionnaireResponse();
		qr.getQuestionnaireElement().setValueAsString("Questionnaire/123/_history/456");

		String output = ctx.newRDFParser().setPrettyPrint(true).encodeResourceToString(qr);
		ourLog.info(output);

		// NOTE(review): asserts JSON-style quoting; verify this matches Turtle output.
		assertThat(output, containsString("\"Questionnaire/123/_history/456\""));
	}

	/**
	 * A contained resource referenced from two places must be serialized only once.
	 */
	@Test
	public void testDuplicateContainedResourcesNotOutputtedTwice() {
		MedicationDispense md = new MedicationDispense();

		MedicationRequest mr = new MedicationRequest();
		md.addAuthorizingPrescription().setResource(mr);

		Medication med = new Medication();
		md.setMedication(new Reference(med));
		mr.setMedication(new Reference(med));

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(md);
		ourLog.info(encoded);

		// The Medication resource must appear exactly once in the encoded output
		int idx = encoded.indexOf("\"Medication\"");
		assertNotEquals(-1, idx);

		idx = encoded.indexOf("\"Medication\"", idx + 1);
		assertEquals(-1, idx);
	}

	/**
	 * See #814 — same as above, but the contained resources carry manually
	 * assigned local ({@code #}-prefixed) IDs.
	 */
	@Test
	public void testDuplicateContainedResourcesNotOutputtedTwiceWithManualIds() {
		MedicationDispense md = new MedicationDispense();

		MedicationRequest mr = new MedicationRequest();
		mr.setId("#MR");
		md.addAuthorizingPrescription().setResource(mr);

		Medication med = new Medication();
		med.setId("#MED");
		md.setMedication(new Reference(med));
		mr.setMedication(new Reference(med));

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(md);
		ourLog.info(encoded);

		int idx = encoded.indexOf("\"Medication\"");
		assertNotEquals(-1, idx);

		idx = encoded.indexOf("\"Medication\"", idx + 1);
		assertEquals(-1, idx);
	}

	/**
	 * See #814 — manual local IDs plus explicit manual addition to the
	 * {@code contained} list must still not duplicate the resource.
	 */
	@Test
	public void testDuplicateContainedResourcesNotOutputtedTwiceWithManualIdsAndManualAddition() {
		MedicationDispense md = new MedicationDispense();

		MedicationRequest mr = new MedicationRequest();
		mr.setId("#MR");
		md.addAuthorizingPrescription().setResource(mr);

		Medication med = new Medication();
		med.setId("#MED");

		Reference medRef = new Reference();
		medRef.setReference("#MED");
		md.setMedication(medRef);
		mr.setMedication(medRef);

		md.getContained().add(mr);
		md.getContained().add(med);

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(md);
		ourLog.info(encoded);

		int idx = encoded.indexOf("\"Medication\"");
		assertNotEquals(-1, idx);

		idx = encoded.indexOf("\"Medication\"", idx + 1);
		assertEquals(-1, idx);
	}

	/**
	 * Unicode characters in the narrative must survive an encode/parse round trip
	 * (the copyright sign is expected back as the {@code &copy;} entity).
	 */
	@Test
	public void testEncodeAndParseUnicodeCharacterInNarrative() {
		Patient p = new Patient();
		p.getText().getDiv().setValueAsString("<div>Copy © 1999</div>");
		String encoded = ourCtx.newRDFParser().encodeResourceToString(p);
		ourLog.info(encoded);

		p = (Patient) ourCtx.newRDFParser().parseResource(encoded);
		assertEquals("<div xmlns=\"http://www.w3.org/1999/xhtml\">Copy &copy; 1999</div>", p.getText().getDivAsString());
	}

	/**
	 * Mix of a manually contained resource (local ref "#1") and an automatically
	 * contained one; the manual one is added first and must keep its ID.
	 */
	@Test
	public void testEncodeResourceWithMixedManualAndAutomaticContainedResourcesLocalFirst() {
		Observation obs = new Observation();

		Patient pt = new Patient();
		pt.setId("#1");
		pt.addName().setFamily("FAM");
		obs.getSubject().setReference("#1");
		obs.getContained().add(pt);

		Encounter enc = new Encounter();
		enc.setStatus(Encounter.EncounterStatus.ARRIVED);
		obs.getEncounter().setResource(enc);

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(obs);
		ourLog.info(encoded);

		obs = ourCtx.newRDFParser().parseResource(Observation.class, encoded);
		assertEquals("#1", obs.getContained().get(0).getId());
		assertEquals("#2", obs.getContained().get(1).getId());

		pt = (Patient) obs.getSubject().getResource();
		assertEquals("FAM", pt.getNameFirstRep().getFamily());

		enc = (Encounter) obs.getEncounter().getResource();
		assertEquals(Encounter.EncounterStatus.ARRIVED, enc.getStatus());
	}

	/**
	 * Same as the test above, but the manually contained resource is added last.
	 */
	@Test
	public void testEncodeResourceWithMixedManualAndAutomaticContainedResourcesLocalLast() {
		Observation obs = new Observation();

		Patient pt = new Patient();
		pt.addName().setFamily("FAM");
		obs.getSubject().setResource(pt);

		Encounter enc = new Encounter();
		enc.setId("#1");
		enc.setStatus(Encounter.EncounterStatus.ARRIVED);
		obs.getEncounter().setReference("#1");
		obs.getContained().add(enc);

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(obs);
		ourLog.info(encoded);

		obs = ourCtx.newRDFParser().parseResource(Observation.class, encoded);
		assertEquals("#1", obs.getContained().get(0).getId());
		assertEquals("#2", obs.getContained().get(1).getId());

		pt = (Patient) obs.getSubject().getResource();
		assertEquals("FAM", pt.getNameFirstRep().getFamily());

		enc = (Encounter) obs.getEncounter().getResource();
		assertEquals(Encounter.EncounterStatus.ARRIVED, enc.getStatus());
	}

	/**
	 * Automatically contained resources added across two encode/parse cycles;
	 * checks the IDs assigned after the second cycle.
	 */
	@Test
	public void testEncodeResourceWithMixedManualAndAutomaticContainedResourcesLocalLast2() {
		MedicationRequest mr = new MedicationRequest();
		Practitioner pract = new Practitioner().setActive(true);
		mr.getRequester().setResource(pract);

		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(mr);
		ourLog.info(encoded);
		mr = ourCtx.newRDFParser().parseResource(MedicationRequest.class, encoded);

		mr.setMedication(new Reference(new Medication().setStatus(Medication.MedicationStatus.ACTIVE)));

		encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(mr);
		ourLog.info(encoded);
		mr = ourCtx.newRDFParser().parseResource(MedicationRequest.class, encoded);

		assertEquals("#2", mr.getContained().get(0).getId());
		assertEquals("#1", mr.getContained().get(1).getId());
	}

	/**
	 * Test that long string values don't get broken up by the encoder.
	 */
	@Test
	public void testNoBreakInLongString() {
		String longString = StringUtils.leftPad("", 100000, 'A');

		Patient p = new Patient();
		p.addName().setFamily(longString);
		String encoded = ourCtx.newRDFParser().setPrettyPrint(true).encodeResourceToString(p);

		assertThat(encoded, containsString(longString));
	}

	/**
	 * Parses a resource whose extension carries a value with its own extension,
	 * then re-encodes it with both the RDF and XML parsers.
	 *
	 * <p>The input fixture is JSON: it is parsed here with the RDF parser, which
	 * looks like a copy-and-paste artifact from the JSON test suite —
	 * NOTE(review): confirm the intended input format for this test.</p>
	 */
	@Test
	public void testParseAndEncodeExtensionWithValueWithExtension() {
		String input = "{\n" +
			"  \"resourceType\": \"Patient\",\n" +
			"  \"extension\": [\n" +
			"    {\n" +
			"      \"url\": \"https://purl.org/elab/fhir/network/StructureDefinition/1/BirthWeight\",\n" +
			"      \"_valueDecimal\": {\n" +
			"        \"extension\": [\n" +
			"          {\n" +
			"            \"url\": \"http://www.hl7.org/fhir/extension-data-absent-reason.html\",\n" +
			"            \"valueCoding\": {\n" +
			"              \"system\": \"http://hl7.org/fhir/ValueSet/birthweight\",\n" +
			"              \"code\": \"Underweight\",\n" +
			"              \"userSelected\": false\n" +
			"            }\n" +
			"          }\n" +
			"        ]\n" +
			"      }\n" +
			"    }\n" +
			"  ],\n" +
			"  \"identifier\": [\n" +
			"    {\n" +
			"      \"system\": \"https://purl.org/elab/fhir/network/StructureDefinition/1/EuroPrevallStudySubjects\",\n" +
			"      \"value\": \"1\"\n" +
			"    }\n" +
			"  ],\n" +
			"  \"gender\": \"female\"\n" +
			"}";

		// Renamed from "jsonParser": this is the RDF parser under test
		IParser rdfParser = ourCtx.newRDFParser();
		IParser xmlParser = ourCtx.newXmlParser();
		rdfParser.setDontEncodeElements(Sets.newHashSet("id", "meta"));
		xmlParser.setDontEncodeElements(Sets.newHashSet("id", "meta"));

		Patient parsed = rdfParser.parseResource(Patient.class, input);

		ourLog.info(rdfParser.setPrettyPrint(true).encodeResourceToString(parsed));
		assertThat(xmlParser.encodeResourceToString(parsed), containsString("Underweight"));
		assertThat(rdfParser.encodeResourceToString(parsed), containsString("Underweight"));
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

}

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.parser;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.json.*;
import org.junit.Ignore;
import org.junit.Test;
import org.hl7.fhir.r4.model.*;
import java.io.StringReader;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Basic sanity tests for the RDF parser (disabled via {@code @Ignore} while the
 * RDF support is under development).
 */
@Ignore
public class RDFParserTest {

	private static FhirContext ourCtx = FhirContext.forR4();
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RDFParserTest.class);

	// NOTE(review): this fixture is not referenced by any test in this class yet.
	private static final String TEST_STRUCTURELOADING_DATA =
		"{" +
		"    \"resourceType\":\"Organization\"," +
		"    \"id\":\"11111\"," +
		"    \"meta\":{" +
		"     \"lastUpdated\":\"3900-09-20T10:10:10.000-07:00\"" +
		"    }," +
		"    \"identifier\":[" +
		"     {" +
		"       \"value\":\"15250\"" +
		"     }" +
		"    ]," +
		"    \"type\":{" +
		"     \"coding\":[" +
		"       {" +
		"         \"system\":\"http://test\"," +
		"         \"code\":\"ins\"," +
		"         \"display\":\"General Ledger System\"," +
		"         \"userSelected\":false" +
		"       }" +
		"     ]" +
		"    }," +
		"    \"name\":\"Acme Investments\"" +
		"}";

	/**
	 * Encoding must keep the {@code _history} version portion of the questionnaire
	 * reference when that path is whitelisted through the parser options.
	 */
	@Test
	public void testDontStripVersions() {
		FhirContext context = FhirContext.forR4();
		context.getParserOptions().setDontStripVersionsFromReferencesAtPaths("QuestionnaireResponse.questionnaire");

		QuestionnaireResponse response = new QuestionnaireResponse();
		response.getQuestionnaireElement().setValueAsString("Questionnaire/123/_history/456");

		String encoded = context.newRDFParser().setPrettyPrint(true).encodeResourceToString(response);
		ourLog.info(encoded);
		assertThat(encoded, containsString("\"Questionnaire/123/_history/456\""));
	}

}

View File

@ -3,15 +3,17 @@ package org.hl7.fhir.r4.hapi.validation;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine; import com.github.benmanes.caffeine.cache.Caffeine;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport; import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.StructureDefinition; import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.terminologies.ValueSetExpander; import org.hl7.fhir.r4.terminologies.ValueSetExpander;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.function.Function;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport { public class CachingValidationSupport implements IValidationSupport {
@ -21,7 +23,11 @@ public class CachingValidationSupport implements IValidationSupport {
public CachingValidationSupport(IValidationSupport theWrap) { public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap; myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build(); myCache = Caffeine
.newBuilder()
.expireAfterWrite(60, TimeUnit.SECONDS)
.maximumSize(5000)
.build();
} }
@Override @Override
@ -31,14 +37,14 @@ public class CachingValidationSupport implements IValidationSupport {
@Override @Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) { public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
return (List<IBaseResource>) myCache.get("fetchAllConformanceResources", String key = "fetchAllConformanceResources";
t -> myWrap.fetchAllConformanceResources(theContext)); return loadFromCache(key, t -> myWrap.fetchAllConformanceResources(theContext));
} }
@Override @Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) { public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions", String key = "fetchAllStructureDefinitions";
t -> myWrap.fetchAllStructureDefinitions(theContext)); return loadFromCache(key, t -> myWrap.fetchAllStructureDefinitions(theContext));
} }
@Override @Override
@ -63,7 +69,8 @@ public class CachingValidationSupport implements IValidationSupport {
@Override @Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) { public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem); String key = "isCodeSystemSupported " + theSystem;
return loadFromCache(key, t -> myWrap.isCodeSystemSupported(theContext, theSystem));
} }
@Override @Override
@ -73,11 +80,18 @@ public class CachingValidationSupport implements IValidationSupport {
@Override @Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) { public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay); String key = "validateCode " + theCodeSystem + " " + theCode;
return loadFromCache(key, t -> myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay));
} }
@Override @Override
public LookupCodeResult lookupCode(FhirContext theContext, String theSystem, String theCode) { public LookupCodeResult lookupCode(FhirContext theContext, String theSystem, String theCode) {
return myWrap.lookupCode(theContext, theSystem, theCode); String key = "lookupCode " + theSystem + " " + theCode;
return loadFromCache(key, t -> myWrap.lookupCode(theContext, theSystem, theCode));
}
@Nullable
private <T> T loadFromCache(String theKey, Function<String, T> theLoader) {
return (T) myCache.get(theKey, theLoader);
} }
} }

15
pom.xml
View File

@ -530,6 +530,15 @@
<id>gteichrow</id> <id>gteichrow</id>
<name>Gary Teichrow</name> <name>Gary Teichrow</name>
</developer> </developer>
<developer>
<id>sethrylan</id>
<name>Seth Rylan Gainey</name>
<url>http://sethrylan.org/</url>
</developer>
<developer>
<id>uurl</id>
<name>Raul Estrada</name>
</developer>
</developers> </developers>
<licenses> <licenses>
@ -971,6 +980,12 @@
<artifactId>httpcore</artifactId> <artifactId>httpcore</artifactId>
<version>${httpcore_version}</version> <version>${httpcore_version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<version>3.12.0</version>
<type>pom</type>
</dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId> <artifactId>lucene-highlighter</artifactId>

View File

@ -79,6 +79,11 @@
</li>The rule list is now cached on a per-request basis, which should improve performance</ul> </li>The rule list is now cached on a per-request basis, which should improve performance</ul>
]]> ]]>
</action> </action>
<action type="add" issue="1321">
Support has been added for RDF encoding and parsing in the
<![CDATA[<a href="https://www.hl7.org/fhir/rdf.html#instance">Turtle</a>]]>
format. Thanks to Raul Estrada for the pull request!
</action>
<action type="add"> <action type="add">
The $expunge global everything operation has been refactored to do deletes The $expunge global everything operation has been refactored to do deletes
in small batches. This change will likely reduce performance, but does allow in small batches. This change will likely reduce performance, but does allow
@ -354,6 +359,16 @@
type for an operation declared on a plain provider without needing to use type for an operation declared on a plain provider without needing to use
a specific version of the FHIR structures. a specific version of the FHIR structures.
</action> </action>
<action type="add">
The $upload-external-code-system operation and the corresponding HAPI FHIR CLI command
can now be used to upload custom vocabulary that has been converted into a standard file format
defined by HAPI FHIR. This is useful for uploading large organizational code systems.
</action>
<action type="fix" issue="1404">
In the JAX-RS server, the resource type history and instance vread
operations had ambiguous paths that could lead to the wrong method
being called. Thanks to Seth Rylan Gainey for the pull request!
</action>
</release> </release>
<release version="3.8.0" date="2019-05-30" description="Hippo"> <release version="3.8.0" date="2019-05-30" description="Hippo">
<action type="fix"> <action type="fix">