Add support for uploading custom terminology using the $upload-external-code-system operation

This commit is contained in:
parent b030d1af31
commit 04ce9cfc1a
@@ -165,6 +165,13 @@ public class ParametersUtil {
 	}
 
+	@SuppressWarnings("unchecked")
+	public static void addParameterToParametersCode(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) {
+		IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("code").newInstance();
+		value.setValue(theValue);
+		addParameterToParameters(theCtx, theParameters, theName, value);
+	}
+
 	@SuppressWarnings("unchecked")
 	public static void addParameterToParametersInteger(FhirContext theCtx, IBaseParameters theParameters, String theName, int theValue) {
 		IPrimitiveType<Integer> count = (IPrimitiveType<Integer>) theCtx.getElementDefinition("integer").newInstance();
@@ -184,6 +191,13 @@ public class ParametersUtil {
 		IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("string").newInstance();
 		value.setValue(theValue);
 		addParameterToParameters(theCtx, theParameters, theName, value);
 	}
 
+	@SuppressWarnings("unchecked")
+	public static void addParameterToParametersUri(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) {
+		IPrimitiveType<String> value = (IPrimitiveType<String>) theCtx.getElementDefinition("uri").newInstance();
+		value.setValue(theValue);
+		addParameterToParameters(theCtx, theParameters, theName, value);
+	}
+
 }
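To make the addition concrete, here is a minimal usage sketch (not part of the diff itself) that combines the new helpers to build the input for the upload operation; the FHIR context, URL, and file path are illustrative:

    FhirContext ctx = FhirContext.forR4();
    IBaseParameters inputParameters = ParametersUtil.newInstance(ctx);
    ParametersUtil.addParameterToParametersUri(ctx, inputParameters, "url", "http://example.com/labCodes");
    ParametersUtil.addParameterToParametersString(ctx, inputParameters, "localfile", "/tmp/custom_terminology.zip");
    ParametersUtil.addParameterToParametersCode(ctx, inputParameters, "contentMode", "custom");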
@@ -63,15 +63,21 @@ public class ValidateUtil {
 		}
 	}
 
+	public static void isNotNullOrThrowUnprocessableEntity(Object theObject, String theMessage, Object... theValues) {
+		if (theObject == null) {
+			throw new UnprocessableEntityException(String.format(theMessage, theValues));
+		}
+	}
+
 	public static void isNotTooLongOrThrowIllegalArgument(String theString, int theMaxLength, String theMessage) {
 		if (length(theString) > theMaxLength) {
 			throw new IllegalArgumentException(theMessage);
 		}
 	}
 
-	public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage) {
+	public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage, Object... theValues) {
 		if (theSuccess == false) {
-			throw new InvalidRequestException(theMessage);
+			throw new InvalidRequestException(String.format(theMessage, theValues));
 		}
 	}
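The new and updated validators accept String.format-style varargs, so callers can interpolate values into failure messages without manual concatenation. Two calls taken from elsewhere in this commit show the pattern (the variables are those used at the respective call sites):

    ValidateUtil.isNotNullOrThrowUnprocessableEntity(parentConcept, "Parent code %s not found", parent);
    ValidateUtil.isTrueOrThrowInvalidRequest(theSystem.equalsIgnoreCase(codeSystem.getUrl()), "CodeSystem.url does not match the supplied system: %s", theSystem);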
@@ -3,6 +3,7 @@ package ca.uhn.fhir.util;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import org.junit.Test;
 
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -69,4 +70,18 @@ public class ValidateUtilTest {
 		}
 	}
 
+	@Test
+	public void testIsNotNull() {
+		ValidateUtil.isNotNullOrThrowUnprocessableEntity("aa", "");
+
+		try {
+			ValidateUtil.isNotNullOrThrowUnprocessableEntity(null, "The message %s", "123");
+			fail();
+		} catch (UnprocessableEntityException e) {
+			assertEquals("The message 123", e.getMessage());
+		}
+
+	}
+
 }
@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
+import ca.uhn.fhir.util.ParametersUtil;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
@@ -56,6 +57,7 @@ public class UploadTerminologyCommand extends BaseCommand {
 		addBaseUrlOption(options);
 		addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
 		addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)");
+		addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format");
 		addBasicAuthOption(options);
 		addVerboseLoggingOption(options);
@@ -78,23 +80,13 @@ public class UploadTerminologyCommand extends BaseCommand {
 		}
 
 		IGenericClient client = super.newClient(theCommandLine);
-		IBaseParameters inputParameters;
-		if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
-			org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
-			p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
-			for (String next : datafile) {
-				p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next));
-			}
-			inputParameters = p;
-		} else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) {
-			org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
-			p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
-			for (String next : datafile) {
-				p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
-			}
-			inputParameters = p;
-		} else {
-			throw new ParseException("This command does not support FHIR version " + ctx.getVersion().getVersion());
-		}
+		IBaseParameters inputParameters = ParametersUtil.newInstance(myFhirCtx);
+		ParametersUtil.addParameterToParametersUri(myFhirCtx, inputParameters, "url", termUrl);
+		for (String next : datafile) {
+			ParametersUtil.addParameterToParametersString(myFhirCtx, inputParameters, "localfile", next);
+		}
+		if (theCommandLine.hasOption("custom")) {
+			ParametersUtil.addParameterToParametersCode(myFhirCtx, inputParameters, "contentMode", "custom");
+		}
 
 		if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) {
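With the version-specific Parameters construction replaced by the version-independent ParametersUtil helpers, the command no longer needs per-version branches or the ParseException for unsupported versions. A custom upload from the CLI might then look roughly like this sketch (only -u, -d, and --custom appear in this diff; the base-URL flag added by addBaseUrlOption is omitted here and depends on the CLI version):

    upload-terminology -u http://example.com/labCodes -d ./custom_terminology.zip --custom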
@@ -50,15 +50,12 @@ import static org.apache.commons.lang3.StringUtils.length;
 	@Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED")
 })
 public class TermConcept implements Serializable {
-	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
-
-	private static final long serialVersionUID = 1L;
-
 	public static final int MAX_CODE_LENGTH = 500;
 	public static final int MAX_DESC_LENGTH = 400;
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
+	private static final long serialVersionUID = 1L;
 
 	@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {})
-	private Collection<TermConceptParentChildLink> myChildren;
+	private List<TermConceptParentChildLink> myChildren;
 
 	@Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH)
 	@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
@@ -178,7 +175,7 @@ public class TermConcept implements Serializable {
 		return b.isEquals();
 	}
 
-	public Collection<TermConceptParentChildLink> getChildren() {
+	public List<TermConceptParentChildLink> getChildren() {
 		if (myChildren == null) {
 			myChildren = new ArrayList<>();
 		}
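Design note: narrowing myChildren from Collection to List gives child links a stable, indexable ordering. The new TerminologyLoaderSvcCustomTest below relies on this when it asserts against getChildren().get(0) and getChildren().get(1), which only has a defined meaning for a List.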
@@ -160,6 +160,7 @@ public abstract class TerminologyUploaderProvider extends BaseJpaProvider {
 	public IBaseParameters uploadExternalCodeSystem(
 		HttpServletRequest theServletRequest,
 		@OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl,
+		@OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType<String> theContentMode,
 		@OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theLocalFile,
 		@OperationParam(name = "package", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage,
 		RequestDetails theRequestDetails
@@ -210,7 +211,7 @@ public abstract class TerminologyUploaderProvider extends BaseJpaProvider {
 			final String url = AttachmentUtil.getOrCreateUrl(myCtx, nextPackage).getValueAsString();
 
 			if (isBlank(url)) {
-				throw new UnprocessableEntityException("Package is missing mandatory url element");
+				throw new UnprocessableEntityException("Package is missing mandatory codeSystemUrl element");
 			}
 
 			localFiles.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
@@ -228,22 +229,27 @@ public abstract class TerminologyUploaderProvider extends BaseJpaProvider {
 			}
 		}
 
-		String url = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null;
-		url = defaultString(url);
+		String codeSystemUrl = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null;
+		codeSystemUrl = defaultString(codeSystemUrl);
+
+		String contentMode = theContentMode != null ? theContentMode.getValue() : null;
 		UploadStatistics stats;
-		switch (url) {
-			case IHapiTerminologyLoaderSvc.SCT_URI:
-				stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
-				break;
-			case IHapiTerminologyLoaderSvc.LOINC_URI:
-				stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
-				break;
-			case IHapiTerminologyLoaderSvc.IMGTHLA_URI:
-				stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
-				break;
-			default:
-				throw new InvalidRequestException("Unknown URL: " + url);
+		if ("custom".equals(contentMode)) {
+			stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
+		} else {
+			switch (codeSystemUrl) {
+				case IHapiTerminologyLoaderSvc.SCT_URI:
+					stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
+					break;
+				case IHapiTerminologyLoaderSvc.LOINC_URI:
+					stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
+					break;
+				case IHapiTerminologyLoaderSvc.IMGTHLA_URI:
+					stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
+					break;
+				default:
+					throw new InvalidRequestException("Unknown URL: " + codeSystemUrl);
+			}
 		}
 
 		IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
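For context, a client-side invocation of the extended operation could look roughly like the following sketch, assuming HAPI's fluent generic-client operation API and a Parameters object built as in the ParametersUtil example above (the endpoint URL is illustrative):

    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseR4");
    IBaseParameters outcome = client
       .operation()
       .onServer()
       .named("upload-external-code-system")
       .withParameters(inputParameters)
       .execute();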
@@ -39,6 +39,8 @@ public interface IHapiTerminologyLoaderSvc {
 
 	UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
 
+	UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
+
 	interface FileDescriptor {
 
 		String getFilename();
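Unlike loadSnomedCt and loadLoinc, which are implicitly bound to the fixed SCT_URI and LOINC_URI code systems, loadCustom takes the target code system URI as an explicit theSystem argument, since a custom vocabulary can live at any URL.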
@@ -4,15 +4,19 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
+import ca.uhn.fhir.jpa.term.custom.ConceptHandler;
+import ca.uhn.fhir.jpa.term.custom.HierarchyHandler;
 import ca.uhn.fhir.jpa.term.loinc.*;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
 import ca.uhn.fhir.jpa.util.Counter;
+import ca.uhn.fhir.rest.api.EncodingEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.util.ValidateUtil;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import org.apache.commons.csv.CSVFormat;
@@ -83,15 +87,19 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 	public static final String LOINC_GROUP_FILE = "Group.csv";
 	public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
 	public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";
+	public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv";
+	public static final String CUSTOM_HIERARCHY_FILE = "hierarchy.csv";
+	public static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json";
+	public static final String CUSTOM_CODESYSTEM_XML = "codesystem.xml";
+
 	private static final int LOG_INCREMENT = 1000;
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
 
 	@Autowired
 	private IHapiTerminologySvc myTermSvc;
-	@Autowired(required = false)
-	private IHapiTerminologySvcDstu3 myTermSvcDstu3;
-	@Autowired(required = false)
-	private IHapiTerminologySvcR4 myTermSvcR4;
+
+	// FYI: Hardcoded to R4 because that's what the term svc uses internally
+	private final FhirContext myCtx = FhirContext.forR4();
 
 	private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {
@@ -165,7 +173,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 		int nextLoggedCount = 0;
 		while (iter.hasNext()) {
 			CSVRecord nextRecord = iter.next();
-			if (nextRecord.isConsistent()==false) {
+			if (nextRecord.isConsistent() == false) {
 				continue;
 			}
 			theHandler.accept(nextRecord);
@@ -203,8 +211,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 			ourLog.info("Beginning IMGTHLA processing");
 
 			return processImgthlaFiles(descriptors, theRequestDetails);
-		}
-		finally {
+		} finally {
 			IOUtils.closeQuietly(descriptors);
 		}
 	}
@@ -229,15 +236,15 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 			LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV,
 			LOINC_IMAGING_DOCUMENT_CODES_FILE
 		);
 		descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
 
 		List<String> optionalFilenameFragments = Arrays.asList(
 		);
 		descriptors.verifyOptionalFilesExist(optionalFilenameFragments);
 
 		ourLog.info("Beginning LOINC processing");
 
 		return processLoincFiles(descriptors, theRequestDetails);
 		}
 	}
@@ -257,6 +264,67 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 		}
 	}
 
+	@Override
+	public UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
+		try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
+			final Map<String, TermConcept> code2concept = new HashMap<>();
+			IRecordHandler handler;
+
+			Optional<String> codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML);
+			CodeSystem codeSystem;
+			if (codeSystemContent.isPresent()) {
+				codeSystem = EncodingEnum
+					.detectEncoding(codeSystemContent.get())
+					.newParser(myCtx)
+					.parseResource(CodeSystem.class, codeSystemContent.get());
+				ValidateUtil.isTrueOrThrowInvalidRequest(theSystem.equalsIgnoreCase(codeSystem.getUrl()), "CodeSystem.url does not match the supplied system: %s", theSystem);
+				ValidateUtil.isTrueOrThrowInvalidRequest(CodeSystem.CodeSystemContentMode.NOTPRESENT.equals(codeSystem.getContent()), "CodeSystem.content does not match the expected value: %s", CodeSystem.CodeSystemContentMode.NOTPRESENT.toCode());
+			} else {
+				codeSystem = new CodeSystem();
+				codeSystem.setUrl(theSystem);
+				codeSystem.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
+			}
+
+			TermCodeSystemVersion csv = new TermCodeSystemVersion();
+
+			// Concept File
+			handler = new ConceptHandler(code2concept, csv);
+			iterateOverZipFile(descriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
+
+			// Hierarchy
+			if (descriptors.hasFile(CUSTOM_HIERARCHY_FILE)) {
+				handler = new HierarchyHandler(code2concept);
+				iterateOverZipFile(descriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
+			}
+
+			// Add root concepts to CodeSystemVersion
+			for (TermConcept nextConcept : code2concept.values()) {
+				if (nextConcept.getParents().isEmpty()) {
+					csv.getConcepts().add(nextConcept);
+				}
+			}
+
+			IIdType target = storeCodeSystem(theRequestDetails, csv, codeSystem, null, null);
+			return new UploadStatistics(code2concept.size(), target);
+		}
+	}
+
+	private Optional<String> loadFile(LoadedFileDescriptors theDescriptors, String... theFilenames) {
+		for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
+			for (String nextFilename : theFilenames) {
+				if (next.getFilename().endsWith(nextFilename)) {
+					try {
+						String contents = IOUtils.toString(next.getInputStream(), Charsets.UTF_8);
+						return Optional.of(contents);
+					} catch (IOException e) {
+						throw new InternalErrorException(e);
+					}
+				}
+			}
+		}
+		return Optional.empty();
+	}
+
 	UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
 		final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
 		final Map<String, TermConcept> code2concept = new HashMap<>();
@@ -285,13 +353,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 		for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
 			String nextFilename = nextZipBytes.getFilename();
 
-			if(!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)
+			if (!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)
 				&& !IMGTHLA_HLA_XML.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) {
 				ourLog.info("Skipping unexpected file {}", nextFilename);
 				continue;
 			}
 
-			if(IMGTHLA_HLA_NOM_TXT.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)) {
+			if (IMGTHLA_HLA_NOM_TXT.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT)) {
 				// process colon-delimited hla_nom.txt file
 				ourLog.info("Processing file {}", nextFilename);
@@ -309,20 +377,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 				}
 
 				LineNumberReader lnr = new LineNumberReader(reader);
-				while(lnr.readLine() != null) {}
+				while (lnr.readLine() != null) {
+				}
 				ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber());
 
 			} catch (IOException e) {
 				throw new InternalErrorException(e);
-			}
-			finally {
+			} finally {
 				IOUtils.closeQuietly(reader);
 			}
 
 			foundHlaNom = true;
 		}
 
-			if(IMGTHLA_HLA_XML.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) {
+			if (IMGTHLA_HLA_XML.equals(nextFilename) || nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) {
 				// process hla.xml file
 				ourLog.info("Processing file {}", nextFilename);
@@ -340,13 +408,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 				}
 
 				LineNumberReader lnr = new LineNumberReader(reader);
-				while(lnr.readLine() != null) {}
+				while (lnr.readLine() != null) {
+				}
 				ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber());
 
 			} catch (IOException e) {
 				throw new InternalErrorException(e);
-			}
-			finally {
+			} finally {
 				IOUtils.closeQuietly(reader);
 			}
@@ -553,11 +621,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 		return new UploadStatistics(code2concept.size(), target);
 	}
 
-	@VisibleForTesting
-	void setTermSvcDstu3ForUnitTest(IHapiTerminologySvcDstu3 theTermSvcDstu3) {
-		myTermSvcDstu3 = theTermSvcDstu3;
-	}
-
 	@VisibleForTesting
 	void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) {
 		myTermSvc = theTermSvc;
@@ -571,38 +634,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 
 		IIdType retVal;
 		myTermSvc.setProcessDeferred(false);
-		if (myTermSvcDstu3 != null) {
-			retVal = myTermSvcDstu3.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
-		} else {
-			retVal = myTermSvcR4.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
-		}
+		retVal = myTermSvc.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
 		myTermSvc.setProcessDeferred(true);
 
 		return retVal;
 	}
 
-	public static String firstNonBlank(String... theStrings) {
-		String retVal = "";
-		for (String nextString : theStrings) {
-			if (isNotBlank(nextString)) {
-				retVal = nextString;
-				break;
-			}
-		}
-		return retVal;
-	}
-
-	public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
-		TermConcept concept = id2concept.get(id);
-		if (concept == null) {
-			concept = new TermConcept();
-			id2concept.put(id, concept);
-			concept.setCodeSystemVersion(codeSystemVersion);
-		}
-		return concept;
-	}
-
 	static class LoadedFileDescriptors implements Closeable {
 		private List<File> myTemporaryFiles = new ArrayList<>();
@@ -651,6 +688,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 			}
 		}
 
+		boolean hasFile(String theFilename) {
+			return myUncompressedFileDescriptors
+				.stream()
+				.map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename
+				.anyMatch(t -> t.equals(theFilename));
+		}
+
 		@Override
 		public void close() {
 			for (File next : myTemporaryFiles) {
@@ -694,4 +738,25 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 
 	}
 
+	public static String firstNonBlank(String... theStrings) {
+		String retVal = "";
+		for (String nextString : theStrings) {
+			if (isNotBlank(nextString)) {
+				retVal = nextString;
+				break;
+			}
+		}
+		return retVal;
+	}
+
+	public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
+		TermConcept concept = id2concept.get(id);
+		if (concept == null) {
+			concept = new TermConcept();
+			id2concept.put(id, concept);
+			concept.setCodeSystemVersion(codeSystemVersion);
+		}
+		return concept;
+	}
 }
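Given the validation in loadCustom (CodeSystem.url must match the supplied system, and CodeSystem.content must be not-present), a minimal codesystem.json accepted by the loader would look roughly like this; the name element is optional and mirrors the test fixture used below:

    {
      "resourceType": "CodeSystem",
      "url": "http://example.com/labCodes",
      "name": "Example Lab Codes",
      "content": "not-present"
    }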
@@ -0,0 +1,63 @@
+package ca.uhn.fhir.jpa.term.custom;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.term.IRecordHandler;
+import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.lang3.Validate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.apache.commons.lang3.StringUtils.trim;
+
+public class ConceptHandler implements IRecordHandler {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(ConceptHandler.class);
+	private final Map<String, TermConcept> myCode2Concept;
+	private final TermCodeSystemVersion myCodeSystemVersion;
+
+	public ConceptHandler(Map<String, TermConcept> theCode2concept, TermCodeSystemVersion theCodeSystemVersion) {
+		myCode2Concept = theCode2concept;
+		myCodeSystemVersion = theCodeSystemVersion;
+	}
+
+	@Override
+	public void accept(CSVRecord theRecord) {
+		String code = trim(theRecord.get("CODE"));
+		if (isNotBlank(code)) {
+			String display = trim(theRecord.get("DISPLAY"));
+
+			Validate.isTrue(!myCode2Concept.containsKey(code), "The code %s has appeared more than once", code);
+
+			TermConcept concept = TerminologyLoaderSvcImpl.getOrCreateConcept(myCodeSystemVersion, myCode2Concept, code);
+			concept.setCode(code);
+			concept.setDisplay(display);
+
+			myCode2Concept.put(code, concept);
+		}
+	}
+}
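ConceptHandler reads the CODE and DISPLAY columns of concepts.csv (parsed with a comma delimiter, as configured in loadCustom). Assuming the first row is treated as a header, a file matching the test data below would look like:

    "CODE","DISPLAY"
    "CHEM","Chemistry"
    "HB","Hemoglobin"
    "NEUT","Neutrophils"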
@@ -0,0 +1,58 @@
+package ca.uhn.fhir.jpa.term.custom;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
+import ca.uhn.fhir.jpa.term.IRecordHandler;
+import ca.uhn.fhir.util.ValidateUtil;
+import org.apache.commons.csv.CSVRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.apache.commons.lang3.StringUtils.trim;
+
+public class HierarchyHandler implements IRecordHandler {
+
+	private final Map<String, TermConcept> myCode2Concept;
+
+	public HierarchyHandler(Map<String, TermConcept> theCode2concept) {
+		myCode2Concept = theCode2concept;
+	}
+
+	@Override
+	public void accept(CSVRecord theRecord) {
+		String parent = trim(theRecord.get("PARENT"));
+		String child = trim(theRecord.get("CHILD"));
+		if (isNotBlank(parent) && isNotBlank(child)) {
+
+			TermConcept parentConcept = myCode2Concept.get(parent);
+			ValidateUtil.isNotNullOrThrowUnprocessableEntity(parentConcept, "Parent code %s not found", parent);
+			TermConcept childConcept = myCode2Concept.get(child);
+			ValidateUtil.isNotNullOrThrowUnprocessableEntity(childConcept, "Child code %s not found", child);
+
+			parentConcept.addChild(childConcept, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
+		}
+	}
+}
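HierarchyHandler wires up PARENT/CHILD pairs, and both codes must already have been loaded from concepts.csv or the upload is rejected with an UnprocessableEntityException. A hierarchy.csv matching the test data (same header-row assumption as above):

    "PARENT","CHILD"
    "CHEM","HB"
    "CHEM","NEUT"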
@@ -0,0 +1,58 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
+import org.hl7.fhir.r4.model.ValueSet;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@RunWith(MockitoJUnitRunner.class)
+abstract class BaseLoaderTest {
+
+	@Mock
+	protected RequestDetails mySrd;
+	@Captor
+	protected ArgumentCaptor<List<ConceptMap>> myConceptMapCaptor;
+	@Captor
+	protected ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
+	@Captor
+	protected ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
+	@Captor
+	protected ArgumentCaptor<CodeSystem> mySystemCaptor;
+
+	Map<String, ConceptMap> extractConceptMaps() {
+		Map<String, ConceptMap> conceptMaps = new HashMap<>();
+		for (ConceptMap next : myConceptMapCaptor.getAllValues().get(0)) {
+			conceptMaps.put(next.getId(), next);
+		}
+		return conceptMaps;
+	}
+
+	Map<String, TermConcept> extractConcepts() {
+		Map<String, TermConcept> concepts = new HashMap<>();
+		for (TermConcept next : myCsvCaptor.getValue().getConcepts()) {
+			concepts.put(next.getCode(), next);
+		}
+		return concepts;
+	}
+
+	Map<String, ValueSet> extractValueSets() {
+		Map<String, ValueSet> valueSets = new HashMap<>();
+		for (ValueSet next : myValueSetsCaptor.getValue()) {
+			valueSets.put(next.getId(), next);
+		}
+		return valueSets;
+	}
+
+}
@@ -0,0 +1,115 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.util.TestUtil;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(MockitoJUnitRunner.class)
+public class TerminologyLoaderSvcCustomTest extends BaseLoaderTest {
+	private TerminologyLoaderSvcImpl mySvc;
+
+	@Mock
+	private IHapiTerminologySvc myTermSvc;
+
+	private ZipCollectionBuilder myFiles;
+
+	@Before
+	public void before() {
+		mySvc = new TerminologyLoaderSvcImpl();
+		mySvc.setTermSvcForUnitTests(myTermSvc);
+
+		myFiles = new ZipCollectionBuilder();
+	}
+
+	@Test
+	public void testLoadComplete() throws Exception {
+		myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CODESYSTEM_JSON);
+		myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
+		myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_HIERARCHY_FILE);
+
+		// Actually do the load
+		mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
+
+		verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
+		Map<String, TermConcept> concepts = extractConcepts();
+
+		// Verify codesystem
+		assertEquals("http://example.com/labCodes", mySystemCaptor.getValue().getUrl());
+		assertEquals(CodeSystem.CodeSystemContentMode.NOTPRESENT, mySystemCaptor.getValue().getContent());
+		assertEquals("Example Lab Codes", mySystemCaptor.getValue().getName());
+
+		// Root code
+		TermConcept code;
+		assertEquals(2, concepts.size());
+		code = concepts.get("CHEM");
+		assertEquals("CHEM", code.getCode());
+		assertEquals("Chemistry", code.getDisplay());
+
+		assertEquals(2, code.getChildren().size());
+		assertEquals("HB", code.getChildren().get(0).getChild().getCode());
+		assertEquals("Hemoglobin", code.getChildren().get(0).getChild().getDisplay());
+		assertEquals("NEUT", code.getChildren().get(1).getChild().getCode());
+		assertEquals("Neutrophils", code.getChildren().get(1).getChild().getDisplay());
+
+	}
+
+	@Test
+	public void testLoadWithNoCodeSystem() throws Exception {
+		myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
+
+		// Actually do the load
+		mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
+
+		verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
+		Map<String, TermConcept> concepts = extractConcepts();
+
+		// Verify codesystem
+		assertEquals("http://example.com/labCodes", mySystemCaptor.getValue().getUrl());
+		assertEquals(CodeSystem.CodeSystemContentMode.NOTPRESENT, mySystemCaptor.getValue().getContent());
+
+	}
+
+	/**
+	 * No hierarchy file supplied
+	 */
+	@Test
+	public void testLoadCodesOnly() throws Exception {
+		myFiles.addFileZip("/custom_term/", TerminologyLoaderSvcImpl.CUSTOM_CONCEPTS_FILE);
+
+		// Actually do the load
+		mySvc.loadCustom("http://example.com/labCodes", myFiles.getFiles(), mySrd);
+
+		verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
+		Map<String, TermConcept> concepts = extractConcepts();
+
+		TermConcept code;
+
+		// Root code
+		assertEquals(5, concepts.size());
+		code = concepts.get("CHEM");
+		assertEquals("CHEM", code.getCode());
+		assertEquals("Chemistry", code.getDisplay());
+
+	}
+
+	@AfterClass
+	public static void afterClassClearContext() {
+		TestUtil.clearAllStaticFieldsForUnitTest();
+	}
+
+}
@@ -1,6 +1,5 @@
 package ca.uhn.fhir.jpa.term;
 
-import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.util.TestUtil;
@@ -8,9 +7,7 @@ import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
-import org.junit.runner.RunWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
 
 import java.io.IOException;
@@ -18,19 +15,12 @@ import static org.hamcrest.Matchers.containsString;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
-@RunWith(MockitoJUnitRunner.class)
-public class TerminologyLoaderSvcImgthlaTest {
-	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImgthlaTest.class);
+public class TerminologyLoaderSvcImgthlaTest extends BaseLoaderTest {
 	private TerminologyLoaderSvcImpl mySvc;
 
 	@Mock
 	private IHapiTerminologySvc myTermSvc;
 
-	@Mock
-	private IHapiTerminologySvcDstu3 myTermSvcDstu3;
-
-	@Mock
-	private RequestDetails details;
 	private ZipCollectionBuilder myFiles;
 
 
@@ -38,7 +28,6 @@ public class TerminologyLoaderSvcImgthlaTest {
 	public void before() {
 		mySvc = new TerminologyLoaderSvcImpl();
 		mySvc.setTermSvcForUnitTests(myTermSvc);
-		mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
 
 		myFiles = new ZipCollectionBuilder();
 	}
@@ -49,7 +38,7 @@ public class TerminologyLoaderSvcImgthlaTest {
 
 		// Actually do the load
 		try {
-			mySvc.loadImgthla(myFiles.getFiles(), details);
+			mySvc.loadImgthla(myFiles.getFiles(), mySrd);
 			fail("Expected \"not yet fully implemented\" InternalErrorException");
 		} catch(InternalErrorException e) {
 			// for now, expect "not yet fully implemented" exception
@@ -65,7 +54,7 @@ public class TerminologyLoaderSvcImgthlaTest {
 		addImgthlaMandatoryFilesToZip(myFiles);
 
 		// Actually do the load
-		mySvc.loadImgthla(myFiles.getFiles(), details);
+		mySvc.loadImgthla(myFiles.getFiles(), mySrd);
 
 		// TODO: verify the code system was loaded correctly (similarly to TerminologyLoaderSvcLoincTest.testLoadLoincMandatoryFilesOnly)
 	}
@@ -76,7 +65,7 @@ public class TerminologyLoaderSvcImgthlaTest {
 
 		// Actually do the load
 		try {
-			mySvc.loadImgthla(myFiles.getFiles(), details);
+			mySvc.loadImgthla(myFiles.getFiles(), mySrd);
 			fail("Expected UnprocessableEntityException");
 		} catch (UnprocessableEntityException e) {
 			assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));
@ -1,7 +1,6 @@
|
||||||
package ca.uhn.fhir.jpa.term;
|
package ca.uhn.fhir.jpa.term;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
|
||||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||||
import ca.uhn.fhir.jpa.term.loinc.*;
|
import ca.uhn.fhir.jpa.term.loinc.*;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
@ -15,46 +14,28 @@ import org.junit.AfterClass;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import org.junit.Ignore;
|
import org.junit.Ignore;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
import org.junit.runner.RunWith;
|
|
||||||
import org.mockito.ArgumentCaptor;
|
import org.mockito.ArgumentCaptor;
|
||||||
import org.mockito.Captor;
|
import org.mockito.Captor;
|
||||||
import org.mockito.Mock;
|
import org.mockito.Mock;
|
||||||
import org.mockito.junit.MockitoJUnitRunner;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import static org.hamcrest.Matchers.contains;
|
import static org.hamcrest.Matchers.*;
|
||||||
import static org.hamcrest.Matchers.containsString;
|
|
||||||
import static org.hamcrest.Matchers.empty;
|
|
||||||
import static org.junit.Assert.*;
|
import static org.junit.Assert.*;
|
||||||
import static org.mockito.ArgumentMatchers.any;
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
import static org.mockito.Mockito.times;
|
import static org.mockito.Mockito.times;
|
||||||
import static org.mockito.Mockito.verify;
|
import static org.mockito.Mockito.verify;
|
||||||
|
|
||||||
@RunWith(MockitoJUnitRunner.class)
|
public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest {
|
||||||
public class TerminologyLoaderSvcLoincTest {
|
|
||||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class);
|
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class);
|
||||||
private TerminologyLoaderSvcImpl mySvc;
|
private TerminologyLoaderSvcImpl mySvc;
|
||||||
|
|
||||||
@Mock
|
@Mock
|
||||||
private IHapiTerminologySvc myTermSvc;
|
private IHapiTerminologySvc myTermSvc;
|
||||||
|
|
||||||
@Mock
|
|
||||||
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
|
|
||||||
|
|
||||||
@Captor
|
|
||||||
private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
|
|
||||||
@Captor
|
@Captor
|
||||||
private ArgumentCaptor<CodeSystem> mySystemCaptor;
|
private ArgumentCaptor<CodeSystem> mySystemCaptor;
|
||||||
@Mock
|
|
||||||
private RequestDetails details;
|
|
||||||
@Captor
|
|
||||||
private ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
|
|
||||||
@Captor
|
|
||||||
private ArgumentCaptor<List<ConceptMap>> myConceptMapCaptor;
|
|
||||||
private ZipCollectionBuilder myFiles;
|
private ZipCollectionBuilder myFiles;
|
||||||
|
|
||||||
|
|
||||||
|
@ -62,43 +43,18 @@ public class TerminologyLoaderSvcLoincTest {
|
||||||
public void before() {
|
public void before() {
|
||||||
mySvc = new TerminologyLoaderSvcImpl();
|
mySvc = new TerminologyLoaderSvcImpl();
|
||||||
mySvc.setTermSvcForUnitTests(myTermSvc);
|
mySvc.setTermSvcForUnitTests(myTermSvc);
|
||||||
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
|
|
||||||
|
|
||||||
myFiles = new ZipCollectionBuilder();
|
myFiles = new ZipCollectionBuilder();
|
||||||
}
|
}
|
||||||
|
|
||||||
private Map<String, ConceptMap> extractConceptMaps() {
|
|
||||||
Map<String, ConceptMap> conceptMaps = new HashMap<>();
|
|
||||||
for (ConceptMap next : myConceptMapCaptor.getAllValues().get(0)) {
|
|
||||||
conceptMaps.put(next.getId(), next);
|
|
||||||
}
|
|
||||||
return conceptMaps;
|
|
||||||
}
|
|
||||||
|
|
||||||
private Map<String, TermConcept> extractConcepts() {
|
|
||||||
Map<String, TermConcept> concepts = new HashMap<>();
|
|
||||||
for (TermConcept next : myCsvCaptor.getValue().getConcepts()) {
|
|
||||||
concepts.put(next.getCode(), next);
|
|
||||||
}
|
|
||||||
return concepts;
|
|
||||||
}
|
|
||||||
|
|
||||||
private Map<String, ValueSet> extractValueSets() {
|
|
||||||
Map<String, ValueSet> valueSets = new HashMap<>();
|
|
||||||
for (ValueSet next : myValueSetsCaptor.getValue()) {
|
|
||||||
valueSets.put(next.getId(), next);
|
|
||||||
}
|
|
||||||
return valueSets;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testLoadLoinc() throws Exception {
|
public void testLoadLoinc() throws Exception {
|
||||||
addLoincMandatoryFilesToZip(myFiles);
|
addLoincMandatoryFilesToZip(myFiles);
|
||||||
|
|
||||||
// Actually do the load
|
// Actually do the load
|
||||||
mySvc.loadLoinc(myFiles.getFiles(), details);
|
mySvc.loadLoinc(myFiles.getFiles(), mySrd);
|
||||||
|
|
||||||
verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
|
verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
|
||||||
Map<String, TermConcept> concepts = extractConcepts();
|
Map<String, TermConcept> concepts = extractConcepts();
|
||||||
Map<String, ValueSet> valueSets = extractValueSets();
|
Map<String, ValueSet> valueSets = extractValueSets();
|
||||||
Map<String, ConceptMap> conceptMaps = extractConceptMaps();
|
Map<String, ConceptMap> conceptMaps = extractConceptMaps();
|
||||||
@@ -347,9 +303,9 @@ public class TerminologyLoaderSvcLoincTest {
 		addLoincMandatoryFilesToZip(myFiles);
 
 		// Actually do the load
-		mySvc.loadLoinc(myFiles.getFiles(), details);
+		mySvc.loadLoinc(myFiles.getFiles(), mySrd);
 
-		verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
+		verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());
 		Map<String, TermConcept> concepts = extractConcepts();
 		Map<String, ValueSet> valueSets = extractValueSets();
 		Map<String, ConceptMap> conceptMaps = extractConceptMaps();
@@ -371,7 +327,7 @@ public class TerminologyLoaderSvcLoincTest {
 
 		// Actually do the load
 		try {
-			mySvc.loadLoinc(myFiles.getFiles(), details);
+			mySvc.loadLoinc(myFiles.getFiles(), mySrd);
 			fail();
 		} catch (UnprocessableEntityException e) {
 			assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));
@@ -4,39 +4,36 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.util.TestUtil;
 import org.apache.commons.io.IOUtils;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.ConceptMap;
-import org.hl7.fhir.r4.model.ValueSet;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
-import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Captor;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.FileInputStream;
 import java.io.InputStream;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.TreeSet;
 import java.util.zip.ZipOutputStream;
 
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyListOf;
-import static org.mockito.Mockito.mock;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.*;
 import static org.mockito.Mockito.verify;
 
-@RunWith(MockitoJUnitRunner.class)
-public class TerminologyLoaderSvcSnomedCtTest {
+public class TerminologyLoaderSvcSnomedCtTest extends BaseLoaderTest {
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcSnomedCtTest.class);
 	private TerminologyLoaderSvcImpl mySvc;
 
@@ -44,15 +41,12 @@ public class TerminologyLoaderSvcSnomedCtTest {
 	private IHapiTerminologySvc myTermSvc;
 	@Captor
 	private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
-	@Mock
-	private IHapiTerminologySvcDstu3 myTermSvcDstu3;
 	private ZipCollectionBuilder myFiles;
 
 	@Before
 	public void before() {
 		mySvc = new TerminologyLoaderSvcImpl();
 		mySvc.setTermSvcForUnitTests(myTermSvc);
-		mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
 
 		myFiles = new ZipCollectionBuilder();
 	}
@@ -84,17 +78,16 @@ public class TerminologyLoaderSvcSnomedCtTest {
 		myFiles.addFileZip("/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
 		myFiles.addFileZip("/sct/", "sct2_TextDefinition_Full-en_INT_20160131.txt");
 
-		RequestDetails details = mock(RequestDetails.class);
-		mySvc.loadSnomedCt(myFiles.getFiles(), details);
+		mySvc.loadSnomedCt(myFiles.getFiles(), mySrd);
 
-		verify(myTermSvcDstu3).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyListOf(ValueSet.class), anyListOf(ConceptMap.class));
+		verify(myTermSvc).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyList(), anyListOf(ConceptMap.class));
 
 		TermCodeSystemVersion csv = myCsvCaptor.getValue();
 		TreeSet<String> allCodes = toCodes(csv, true);
 		ourLog.info(allCodes.toString());
 
-		assertThat(allCodes, containsInRelativeOrder("116680003"));
-		assertThat(allCodes, not(containsInRelativeOrder("207527008")));
+		assertThat(allCodes, hasItem("116680003"));
+		assertThat(allCodes, not(hasItem("207527008")));
 
 		allCodes = toCodes(csv, false);
 		ourLog.info(allCodes.toString());
@@ -110,8 +103,7 @@ public class TerminologyLoaderSvcSnomedCtTest {
 	public void testLoadSnomedCtAgainstRealFile() throws Exception {
 		byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));
 
-		RequestDetails details = mock(RequestDetails.class);
-		mySvc.loadSnomedCt(list(bytes), details);
+		mySvc.loadSnomedCt(list(bytes), mySrd);
 	}
 
 	@Test
@@ -123,9 +115,8 @@ public class TerminologyLoaderSvcSnomedCtTest {
 
 		ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
 
-		RequestDetails details = mock(RequestDetails.class);
 		try {
-			mySvc.loadSnomedCt(list(bos.toByteArray()), details);
+			mySvc.loadSnomedCt(list(bos.toByteArray()), mySrd);
 			fail();
 		} catch (UnprocessableEntityException e) {
 			assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input: "));
@@ -0,0 +1,10 @@
+{
+	"resourceType": "CodeSystem",
+	"url": "http://example.com/labCodes",
+	"name": "Example Lab Codes",
+	"description": "A set of lab codes",
+	"status": "active",
+	"publisher": "Example Organization Corporation Worldwide",
+	"date": "2019-07-30",
+	"content": "not-present"
+}
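The new JSON fixture above is a CodeSystem "shell": in FHIR, "content": "not-present" declares that none of the concepts are defined inside the resource itself, which is exactly the situation for custom vocabulary whose codes arrive in the companion CSV files below. For reference, a sketch of building the same stub with the standard HAPI R4 model (the JSON fixture is what the tests actually ship):

    import org.hl7.fhir.r4.model.CodeSystem;
    import org.hl7.fhir.r4.model.DateTimeType;
    import org.hl7.fhir.r4.model.Enumerations;

    CodeSystem cs = new CodeSystem();
    cs.setUrl("http://example.com/labCodes");
    cs.setName("Example Lab Codes");
    cs.setDescription("A set of lab codes");
    cs.setStatus(Enumerations.PublicationStatus.ACTIVE);
    cs.setPublisher("Example Organization Corporation Worldwide");
    cs.setDateElement(new DateTimeType("2019-07-30"));
    // "not-present": the concept definitions live outside this resource
    cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);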
@@ -0,0 +1,8 @@
+CODE,DISPLAY
+
+CHEM,Chemistry
+HB,Hemoglobin
+NEUT,Neutrophils
+
+MICRO,Microbiology
+C&S,Culture and Sensitivity
@@ -0,0 +1,6 @@
+PARENT,CHILD
+
+CHEM,HB
+CHEM,NEUT
+
+MICRO,C&S
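Taken together, the two CSV fixtures define the code system's content: the CODE,DISPLAY file lists the concepts, and the PARENT,CHILD file arranges them into a tree (CHEM is the parent of HB and NEUT; MICRO is the parent of C&S). A sketch of how such a pair can be stitched into the TermConcept graph the tests capture; the parsing below is illustrative only, not the actual loader code:

    import ca.uhn.fhir.jpa.entity.TermConcept;
    import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
    import java.util.*;

    // Header and blank rows from the fixtures omitted for brevity
    List<String> conceptRows = Arrays.asList(
        "CHEM,Chemistry", "HB,Hemoglobin", "NEUT,Neutrophils",
        "MICRO,Microbiology", "C&S,Culture and Sensitivity");
    List<String> hierarchyRows = Arrays.asList("CHEM,HB", "CHEM,NEUT", "MICRO,C&S");

    Map<String, TermConcept> code2concept = new HashMap<>();
    for (String row : conceptRows) {
        String[] cols = row.split(",", 2); // split on the first comma only
        TermConcept concept = new TermConcept();
        concept.setCode(cols[0]);
        concept.setDisplay(cols[1]);
        code2concept.put(cols[0], concept);
    }
    for (String row : hierarchyRows) {
        String[] cols = row.split(",", 2);
        // Attach each child under its parent as an ISA relationship
        code2concept.get(cols[0]).addChild(code2concept.get(cols[1]), RelationshipTypeEnum.ISA);
    }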
@@ -3,15 +3,17 @@ package org.hl7.fhir.r4.hapi.validation;
 import ca.uhn.fhir.context.FhirContext;
 import com.github.benmanes.caffeine.cache.Cache;
 import com.github.benmanes.caffeine.cache.Caffeine;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.StructureDefinition;
 import org.hl7.fhir.r4.model.ValueSet;
-import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r4.terminologies.ValueSetExpander;
 
 import java.util.List;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
 
 @SuppressWarnings("unchecked")
 public class CachingValidationSupport implements IValidationSupport {

@@ -21,7 +23,11 @@ public class CachingValidationSupport implements IValidationSupport {
 
 	public CachingValidationSupport(IValidationSupport theWrap) {
 		myWrap = theWrap;
-		myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
+		myCache = Caffeine
+			.newBuilder()
+			.expireAfterWrite(60, TimeUnit.SECONDS)
+			.maximumSize(5000)
+			.build();
 	}
 
 	@Override

@@ -31,14 +37,14 @@ public class CachingValidationSupport implements IValidationSupport {
 
 	@Override
 	public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
-		return (List<IBaseResource>) myCache.get("fetchAllConformanceResources",
-			t -> myWrap.fetchAllConformanceResources(theContext));
+		String key = "fetchAllConformanceResources";
+		return loadFromCache(key, t -> myWrap.fetchAllConformanceResources(theContext));
 	}
 
 	@Override
 	public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
-		return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
-			t -> myWrap.fetchAllStructureDefinitions(theContext));
+		String key = "fetchAllStructureDefinitions";
+		return loadFromCache(key, t -> myWrap.fetchAllStructureDefinitions(theContext));
 	}
 
 	@Override

@@ -63,7 +69,8 @@ public class CachingValidationSupport implements IValidationSupport {
 
 	@Override
 	public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
-		return myWrap.isCodeSystemSupported(theContext, theSystem);
+		String key = "isCodeSystemSupported " + theSystem;
+		return loadFromCache(key, t -> myWrap.isCodeSystemSupported(theContext, theSystem));
 	}
 
 	@Override

@@ -73,11 +80,18 @@ public class CachingValidationSupport implements IValidationSupport {
 
 	@Override
 	public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
-		return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
+		String key = "validateCode " + theCodeSystem + " " + theCode;
+		return loadFromCache(key, t -> myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay));
 	}
 
 	@Override
 	public LookupCodeResult lookupCode(FhirContext theContext, String theSystem, String theCode) {
-		return myWrap.lookupCode(theContext, theSystem, theCode);
+		String key = "lookupCode " + theSystem + " " + theCode;
+		return loadFromCache(key, t -> myWrap.lookupCode(theContext, theSystem, theCode));
+	}
+
+	@Nullable
+	private <T> T loadFromCache(String theKey, Function<String, T> theLoader) {
+		return (T) myCache.get(theKey, theLoader);
 	}
 }
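The change above is a straight cache-aside decorator: every read now funnels through loadFromCache(), backed by a Caffeine cache with a 60-second expiry and, new in this commit, a 5,000-entry ceiling so that large terminology uploads cannot grow the cache without bound. Wiring it up is a one-liner; using DefaultProfileValidationSupport as the delegate below is an assumed example, any IValidationSupport implementation works:

    import ca.uhn.fhir.context.FhirContext;
    import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
    import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;

    FhirContext ctx = FhirContext.forR4();
    IValidationSupport cached = new CachingValidationSupport(new DefaultProfileValidationSupport());

    // The first call delegates to the wrapped support and caches the result;
    // repeat calls within 60 seconds are answered from memory.
    boolean supported = cached.isCodeSystemSupported(ctx, "http://loinc.org");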
@@ -354,6 +354,11 @@
 			type for an operation declared on a plain provider without needing to use
 			a specific version of the FHIR structures.
 		</action>
+		<action type="add">
+			The $upload-external-code-system operation and the corresponding HAPI FHIR CLI command
+			can now be used to upload custom vocabulary that has been converted into a standard file format
+			defined by HAPI FHIR. This is useful for uploading large organizational code systems.
+		</action>
 	</release>
 	<release version="3.8.0" date="2019-05-30" description="Hippo">
 		<action type="fix">
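For client code, the new operation is invoked like any other server-level FHIR operation. A hypothetical invocation with the HAPI generic client is sketched below; the endpoint URL is a placeholder, and the "url" input parameter name is an assumption, since the changelog entry does not spell out the operation's parameters:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.rest.client.api.IGenericClient;
    import org.hl7.fhir.r4.model.Parameters;
    import org.hl7.fhir.r4.model.UriType;

    FhirContext ctx = FhirContext.forR4();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder

    Parameters inParams = new Parameters();
    // Assumed parameter: canonical URL of the code system being uploaded
    inParams.addParameter().setName("url").setValue(new UriType("http://example.com/labCodes"));

    client.operation()
        .onServer()
        .named("upload-external-code-system")
        .withParameters(inParams)
        .execute();

The HAPI FHIR CLI command mentioned in the entry wraps the same call; its exact switches are documented with the CLI itself rather than in this commit.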