Add support for ICD-10-CM (#2702)
* Add support for ICD-10-CM
* Have this working
* Fix LGTM issues
* Test fix

parent 94d6b15db9
commit 62be623cf5
@@ -31,6 +31,7 @@ import org.apache.commons.text.StringEscapeUtils;
 import org.codehaus.stax2.XMLOutputFactory2;
 import org.codehaus.stax2.io.EscapingWriterFactory;
 import org.w3c.dom.Document;
+import org.w3c.dom.Element;
 import org.w3c.dom.Node;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
@@ -1863,10 +1864,19 @@ public class XmlUtil {
	}

	public static Document parseDocument(String theInput) throws IOException, SAXException {
+		StringReader reader = new StringReader(theInput);
+		return parseDocument(reader);
+	}
+
+	public static Document parseDocument(Reader reader) throws SAXException, IOException {
+		return parseDocument(reader, true);
+	}
+
+	public static Document parseDocument(Reader theReader, boolean theNamespaceAware) throws SAXException, IOException {
		DocumentBuilder builder;
		try {
			DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
-			docBuilderFactory.setNamespaceAware(true);
+			docBuilderFactory.setNamespaceAware(theNamespaceAware);
			docBuilderFactory.setXIncludeAware(false);
			docBuilderFactory.setExpandEntityReferences(false);
			try {
@@ -1885,10 +1895,23 @@ public class XmlUtil {
			throw new ConfigurationException(e);
		}

-		InputSource src = new InputSource(new StringReader(theInput));
+		InputSource src = new InputSource(theReader);
		return builder.parse(src);
	}

+
+	public static List<Element> getChildrenByTagName(Element theParent, String theName) {
+		List<Element> nodeList = new ArrayList<Element>();
+		for (Node child = theParent.getFirstChild(); child != null; child = child.getNextSibling()) {
+			if (child.getNodeType() == Node.ELEMENT_NODE && theName.equals(child.getNodeName())) {
+				nodeList.add((Element) child);
+			}
+		}
+
+		return nodeList;
+	}
+
+
	public static String encodeDocument(Node theElement) throws TransformerException {
		return encodeDocument(theElement, false);
	}
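For reference, a minimal usage sketch of the new XmlUtil overloads follows (illustrative caller code, not part of this commit; the class name and inline XML string are invented for the example):

import ca.uhn.fhir.util.XmlUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import java.io.StringReader;
import java.util.List;

public class XmlUtilUsageSketch {
	public static void main(String[] args) throws Exception {
		String xml = "<root><diag><name>A00</name></diag><diag><name>A01</name></diag></root>";

		// New Reader-based overload; passing false disables namespace awareness,
		// which is how the ICD-10-CM loader added later in this commit calls it.
		Document doc = XmlUtil.parseDocument(new StringReader(xml), false);

		// New helper: returns only the direct child elements with the given tag name.
		List<Element> diags = XmlUtil.getChildrenByTagName(doc.getDocumentElement(), "diag");
		System.out.println(diags.size()); // prints 2
	}
}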
@@ -123,6 +123,10 @@ public class UploadTerminologyCommand extends BaseCommand {
	}

	private void invokeOperation(String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName) throws ParseException {
+		boolean isDeltaOperation =
+			theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) ||
+			theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE);
+
		ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_SYSTEM, theTermUrl);

		ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@@ -134,19 +138,37 @@ public class UploadTerminologyCommand extends BaseCommand {
		for (String nextDataFile : theDatafile) {

			try (FileInputStream fileInputStream = new FileInputStream(nextDataFile)) {
-				if (nextDataFile.endsWith(".csv") || nextDataFile.endsWith(".properties")) {
-
-					ourLog.info("Compressing and adding file: {}", nextDataFile);
-					ZipEntry nextEntry = new ZipEntry(stripPath(nextDataFile));
-					zipOutputStream.putNextEntry(nextEntry);
-
-					CountingInputStream countingInputStream = new CountingInputStream(fileInputStream);
-					IOUtils.copy(countingInputStream, zipOutputStream);
-					haveCompressedContents = true;
-					compressedSourceBytesCount += countingInputStream.getCount();
-
-					zipOutputStream.flush();
-					ourLog.info("Finished compressing {}", nextDataFile);
+				boolean isFhirType = nextDataFile.endsWith(".json") || nextDataFile.endsWith(".xml");
+				if (nextDataFile.endsWith(".csv") || nextDataFile.endsWith(".properties") || isFhirType) {
+
+					if (isDeltaOperation && isFhirType) {
+
+						ourLog.info("Adding CodeSystem resource file: {}", nextDataFile);
+
+						String contents = IOUtils.toString(fileInputStream, Charsets.UTF_8);
+						EncodingEnum encoding = EncodingEnum.detectEncodingNoDefault(contents);
+						if (encoding == null) {
+							throw new ParseException("Could not detect FHIR encoding for file: " + nextDataFile);
+						}
+
+						CodeSystem resource = encoding.newParser(myFhirCtx).parseResource(CodeSystem.class, contents);
+						ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_CODESYSTEM, resource);
+
+					} else {
+
+						ourLog.info("Compressing and adding file: {}", nextDataFile);
+						ZipEntry nextEntry = new ZipEntry(stripPath(nextDataFile));
+						zipOutputStream.putNextEntry(nextEntry);
+
+						CountingInputStream countingInputStream = new CountingInputStream(fileInputStream);
+						IOUtils.copy(countingInputStream, zipOutputStream);
+						haveCompressedContents = true;
+						compressedSourceBytesCount += countingInputStream.getCount();
+
+						zipOutputStream.flush();
+						ourLog.info("Finished compressing {}", nextDataFile);
+
+					}

				} else if (nextDataFile.endsWith(".zip")) {

@@ -154,19 +176,6 @@ public class UploadTerminologyCommand extends BaseCommand {
					String fileName = "file:" + nextDataFile;
					addFileToRequestBundle(theInputParameters, fileName, IOUtils.toByteArray(fileInputStream));

-				} else if (nextDataFile.endsWith(".json") || nextDataFile.endsWith(".xml")) {
-
-					ourLog.info("Adding CodeSystem resource file: {}", nextDataFile);
-
-					String contents = IOUtils.toString(fileInputStream, Charsets.UTF_8);
-					EncodingEnum encoding = EncodingEnum.detectEncodingNoDefault(contents);
-					if (encoding == null) {
-						throw new ParseException("Could not detect FHIR encoding for file: " + nextDataFile);
-					}
-
-					CodeSystem resource = encoding.newParser(myFhirCtx).parseResource(CodeSystem.class, contents);
-					ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_CODESYSTEM, resource);
-
				} else {

					throw new ParseException("Don't know how to handle file: " + nextDataFile);
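To summarize the routing introduced by the hunks above: FHIR CodeSystem files (.json/.xml) are parsed and sent as the PARAM_CODESYSTEM parameter only for the delta add/remove operations, while all other accepted input keeps the previous compress-and-attach behaviour. A small standalone sketch of that decision logic (hypothetical helper, not code from this commit):

public class UploadFileRoutingSketch {

	// Mirrors the branching added to invokeOperation() above, expressed as plain strings.
	static String routeFile(String fileName, boolean isDeltaOperation) {
		boolean isFhirType = fileName.endsWith(".json") || fileName.endsWith(".xml");
		if (fileName.endsWith(".csv") || fileName.endsWith(".properties") || isFhirType) {
			if (isDeltaOperation && isFhirType) {
				// Delta add/remove: parse the file as a FHIR CodeSystem and send it
				// as the PARAM_CODESYSTEM parameter instead of zipping it.
				return "parse as CodeSystem parameter";
			}
			// Full uploads (and CSV/properties input) keep the compress-and-attach path.
			return "compress into ZIP attachment";
		} else if (fileName.endsWith(".zip")) {
			return "attach ZIP bytes as-is";
		}
		return "reject: unknown file type";
	}

	public static void main(String[] args) {
		System.out.println(routeFile("codesystem-delta.json", true));  // parse as CodeSystem parameter
		System.out.println(routeFile("codesystem.json", false));       // compress into ZIP attachment
		System.out.println(routeFile("concepts.csv", true));           // compress into ZIP attachment
	}
}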
@@ -96,6 +96,21 @@
			<artifactId>h2</artifactId>
		</dependency>

+		<!-- Hibernate search Lucene backend -->
+		<dependency>
+			<groupId>org.hibernate.search</groupId>
+			<artifactId>hibernate-search-backend-lucene</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.lucene</groupId>
+			<artifactId>lucene-analyzers-phonetic</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.lucene</groupId>
+			<artifactId>lucene-backward-codecs</artifactId>
+		</dependency>
+
+
		<!-- The following dependencies are only needed for automated unit tests, you do not neccesarily need them to run the example. -->
		<dependency>
			<groupId>org.eclipse.jetty</groupId>
@@ -92,14 +92,15 @@ public class CommonConfig {
		extraProperties.put("hibernate.cache.use_second_level_cache", "false");
		extraProperties.put("hibernate.cache.use_structured_entries", "false");
		extraProperties.put("hibernate.cache.use_minimal_puts", "false");
+		extraProperties.put("hibernate.search.backend.type", "lucene");

-		extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
-		extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem");
-		extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), "target/lucenefiles");
-		extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
-		if (System.getProperty("lowmem") != null) {
+//		extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
+//		extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem");
+//		extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), "target/lucenefiles");
+//		extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
+//		if (System.getProperty("lowmem") != null) {
			extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false");
-		}
+//		}

		return extraProperties;
	}
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 2702
+title: "The JPA server terminology uploader now supports uploading ICD-10-CM (US Edition) using the
+  native format for that vocabulary."
@@ -83,6 +83,12 @@ Note that the path and exact filename of the terminology files will likely need
 ./hapi-fhir-cli upload-terminology -d Downloads/LOINC_2.54_MULTI-AXIAL_HIERARCHY.zip -d Downloads/LOINC_2.54_Text.zip -f dstu3 -t http://localhost:8080/baseDstu3 -u http://loinc.org
 ```
 
+### ICD-10-CM
+
+```
+./hapi-fhir-cli upload-terminology -d icd10cm_tabular_2021.xml -f dstu3 -t http://localhost:8080/baseDstu3 -u http://hl7.org/fhir/sid/icd-10-cm
+```
+
 # Migrate Database
 
 The `migrate-database` command may be used to Migrate a database schema when upgrading a [HAPI FHIR JPA](/docs/server_jpa/introduction.html) project from one version of HAPI FHIR to another version.
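The CLI command above drives the server's $upload-external-code-system operation. For reference, a roughly equivalent invocation with the HAPI generic client is sketched below; it mirrors the R4 test added later in this commit. The base URL, the file path, and the literal parameter names "system" and "file" (the values that PARAM_SYSTEM and PARAM_FILE resolve to) are assumptions for illustration only:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Attachment;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.UriType;

import java.nio.file.Files;
import java.nio.file.Paths;

public class UploadIcd10CmExample {
	public static void main(String[] args) throws Exception {
		FhirContext ctx = FhirContext.forR4();
		// Placeholder server base URL and file path
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseR4");
		byte[] icd10cmXml = Files.readAllBytes(Paths.get("icd10cm_tabular_2021.xml"));

		Parameters outcome = client
			.operation()
			.onType(CodeSystem.class)
			.named("upload-external-code-system")
			.withParameter(Parameters.class, "system", new UriType("http://hl7.org/fhir/sid/icd-10-cm"))
			.andParameter("file", new Attachment().setUrl("icd10cm_tabular_2021.xml").setData(icd10cmXml))
			.execute();

		// Print the returned statistics (concept count, target CodeSystem reference)
		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
	}
}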
@@ -127,14 +127,17 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {

		UploadStatistics stats;
		switch (codeSystemUrl) {
-			case ITermLoaderSvc.SCT_URI:
-				stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
+			case ITermLoaderSvc.ICD10CM_URI:
+				stats = myTerminologyLoaderSvc.loadIcd10cm(localFiles, theRequestDetails);
+				break;
+			case ITermLoaderSvc.IMGTHLA_URI:
+				stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
				break;
			case ITermLoaderSvc.LOINC_URI:
				stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
				break;
-			case ITermLoaderSvc.IMGTHLA_URI:
-				stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
+			case ITermLoaderSvc.SCT_URI:
+				stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
				break;
			default:
				stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
@@ -395,7 +398,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
		return retVal;
	}

-	private static class FileBackedFileDescriptor implements ITermLoaderSvc.FileDescriptor {
+	public static class FileBackedFileDescriptor implements ITermLoaderSvc.FileDescriptor {
		private final File myNextFile;

		public FileBackedFileDescriptor(File theNextFile) {
@@ -9,6 +9,7 @@ import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
 import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
+import ca.uhn.fhir.jpa.term.icd10cm.Icd10CmLoader;
 import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListLinkHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincDocumentOntologyHandler;
@@ -53,6 +54,7 @@ import org.hl7.fhir.r4.model.Enumerations;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.springframework.aop.support.AopUtils;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.xml.sax.SAXException;

 import javax.annotation.Nonnull;
 import javax.validation.constraints.NotNull;
@@ -69,6 +71,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
@@ -260,6 +263,40 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
			}
		}
	}

+	@Override
+	public UploadStatistics loadIcd10cm(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
+		ourLog.info("Beginning ICD-10-cm processing");
+
+		CodeSystem cs = new CodeSystem();
+		cs.setUrl(ICD10CM_URI);
+		cs.setName("ICD-10-CM");
+		cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
+		cs.setStatus(Enumerations.PublicationStatus.ACTIVE);
+
+		TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
+		int count = 0;
+
+		try (LoadedFileDescriptors compressedDescriptors = new LoadedFileDescriptors(theFiles)) {
+			for (FileDescriptor nextDescriptor : compressedDescriptors.getUncompressedFileDescriptors()) {
+				if (nextDescriptor.getFilename().toLowerCase(Locale.US).endsWith(".xml")) {
+					try (InputStream inputStream = nextDescriptor.getInputStream()) {
+						InputStreamReader reader = new InputStreamReader(inputStream, Charsets.UTF_8);
+						Icd10CmLoader loader = new Icd10CmLoader(codeSystemVersion);
+						loader.load(reader);
+						count += loader.getConceptCount();
+					}
+				}
+			}
+		} catch (IOException | SAXException e) {
+			throw new InternalErrorException(e);
+		}
+
+		cs.setVersion(codeSystemVersion.getCodeSystemVersionId());
+
+		IIdType target = storeCodeSystem(theRequestDetails, codeSystemVersion, cs, null, null);
+		return new UploadStatistics(count, target);
+	}
+
	@Override
	public UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
		try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
@@ -36,6 +36,7 @@ public interface ITermLoaderSvc {
	String IMGTHLA_URI = "http://www.ebi.ac.uk/ipd/imgt/hla";
	String LOINC_URI = "http://loinc.org";
	String SCT_URI = "http://snomed.info/sct";
+	String ICD10CM_URI = "http://hl7.org/fhir/sid/icd-10-cm";
	String IEEE_11073_10101_URI = "urn:iso:std:iso:11073:10101";

	UploadStatistics loadImgthla(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
@@ -44,6 +45,8 @@ public interface ITermLoaderSvc {

	UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);

+	UploadStatistics loadIcd10cm(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
+
	UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);

	UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
@@ -0,0 +1,81 @@
+package ca.uhn.fhir.jpa.term.icd10cm;
+
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
+import ca.uhn.fhir.util.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.xml.sax.SAXException;
+
+import java.io.IOException;
+import java.io.Reader;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public class Icd10CmLoader {
+
+	private final TermCodeSystemVersion myCodeSystemVersion;
+	private int myConceptCount;
+
+	/**
+	 * Constructor
+	 */
+	public Icd10CmLoader(TermCodeSystemVersion theCodeSystemVersion) {
+		myCodeSystemVersion = theCodeSystemVersion;
+	}
+
+
+	public void load(Reader theReader) throws IOException, SAXException {
+		myConceptCount = 0;
+
+		Document document = XmlUtil.parseDocument(theReader, false);
+		Element documentElement = document.getDocumentElement();
+
+		// Extract version: Should only be 1 tag
+		for (Element nextVersion : XmlUtil.getChildrenByTagName(documentElement, "version")) {
+			String versionId = nextVersion.getTextContent();
+			if (isNotBlank(versionId)) {
+				myCodeSystemVersion.setCodeSystemVersionId(versionId);
+			}
+		}
+
+		// Extract Diags (codes)
+		for (Element nextChapter : XmlUtil.getChildrenByTagName(documentElement, "chapter")) {
+			for (Element nextSection : XmlUtil.getChildrenByTagName(nextChapter, "section")) {
+				for (Element nextDiag : XmlUtil.getChildrenByTagName(nextSection, "diag")) {
+					extractCode(nextDiag, null);
+				}
+			}
+		}
+
+	}
+
+
+	private void extractCode(Element theDiagElement, TermConcept theParentConcept) {
+		String code = theDiagElement.getElementsByTagName("name").item(0).getTextContent();
+		String display = theDiagElement.getElementsByTagName("desc").item(0).getTextContent();
+
+		TermConcept concept;
+		if (theParentConcept == null) {
+			concept = myCodeSystemVersion.addConcept();
+		} else {
+			concept = theParentConcept.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA);
+		}
+
+		concept.setCode(code);
+		concept.setDisplay(display);
+
+		for (Element nextChildDiag : XmlUtil.getChildrenByTagName(theDiagElement, "diag")) {
+			extractCode(nextChildDiag, concept);
+		}
+
+		myConceptCount++;
+	}
+
+
+	public int getConceptCount() {
+		return myConceptCount;
+	}
+
+}
@@ -9,6 +9,7 @@ import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
 import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
 import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.ClasspathUtil;
 import com.google.common.base.Charsets;
 import org.apache.commons.io.IOUtils;
 import org.hl7.fhir.r4.model.Attachment;
@@ -95,6 +96,25 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Test {
	}

	@Test
+	public void testUploadIcd10cm() {
+		byte[] packageBytes = ClasspathUtil.loadResourceAsByteArray("/icd/icd10cm_tabular_2021.xml");
+
+		Parameters respParam = myClient
+			.operation()
+			.onType(CodeSystem.class)
+			.named("upload-external-code-system")
+			.withParameter(Parameters.class, TerminologyUploaderProvider.PARAM_SYSTEM, new UriType(ITermLoaderSvc.ICD10CM_URI))
+			.andParameter(TerminologyUploaderProvider.PARAM_FILE, new Attachment().setUrl("icd10cm_tabular_2021.xml").setData(packageBytes))
+			.execute();
+
+		String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
+		ourLog.info(resp);
+
+		assertThat(((IntegerType) respParam.getParameter().get(1).getValue()).getValue(), greaterThan(1));
+		assertThat(((Reference) respParam.getParameter().get(2).getValue()).getReference(), matchesPattern("CodeSystem\\/[a-zA-Z0-9\\.\\-]+"));
+	}
+
+	@Test
	public void testUploadLoinc() throws Exception {
		byte[] packageBytes = createLoincZip();

@@ -0,0 +1,61 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.entity.TermCodeSystem;
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.util.ClasspathUtil;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+
+public class TerminologyLoaderSvcIcd10cmJpaTest extends BaseJpaR4Test {
+	private TermLoaderSvcImpl mySvc;
+	private ZipCollectionBuilder myFiles;
+
+	@BeforeEach
+	public void before() {
+		mySvc = new TermLoaderSvcImpl(myTerminologyDeferredStorageSvc, myTermCodeSystemStorageSvc);
+
+		myFiles = new ZipCollectionBuilder();
+	}
+
+	@Test
+	public void testLoadIcd10cm() throws IOException {
+		String filename = "icd/icd10cm_tabular_2021.xml";
+
+		String resource = ClasspathUtil.loadResource(filename);
+		List<ITermLoaderSvc.FileDescriptor> descriptors = new ArrayList<>();
+		descriptors.add(new ITermLoaderSvc.ByteArrayFileDescriptor(filename, resource.getBytes(StandardCharsets.UTF_8)));
+		mySvc.loadIcd10cm(descriptors, new SystemRequestDetails());
+
+		myTerminologyDeferredStorageSvc.saveAllDeferred();
+
+		runInTransaction(() -> {
+			assertEquals(1, myTermCodeSystemDao.count());
+			assertEquals(1, myTermCodeSystemVersionDao.count());
+			assertEquals(0, myTermValueSetDao.count());
+			assertEquals(0, myTermConceptMapDao.count());
+			assertEquals(1, myResourceTableDao.count());
+			assertEquals(17, myTermConceptDao.count());
+			TermCodeSystem codeSystem = myTermCodeSystemDao.findByCodeSystemUri(ITermLoaderSvc.ICD10CM_URI);
+
+			assertEquals("2021", codeSystem.getCurrentVersion().getCodeSystemVersionId());
+
+			TermCodeSystemVersion codeSystemVersion = myTermCodeSystemVersionDao.findByCodeSystemPidAndVersion(codeSystem.getPid(), "2021");
+			assertEquals(codeSystem.getCurrentVersion().getPid(), codeSystemVersion.getPid());
+			assertEquals(codeSystem.getResource().getId(), codeSystemVersion.getResource().getId());
+		});
+
+	}
+
+}
@@ -0,0 +1,46 @@
+package ca.uhn.fhir.jpa.term.icd10cm;
+
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.util.ClasspathUtil;
+import org.hamcrest.Matchers;
+import org.junit.jupiter.api.Test;
+import org.xml.sax.SAXException;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class Icd10CmLoaderTest {
+
+	@Test
+	public void testLoadIcd10Cm() throws IOException, SAXException {
+		StringReader reader = new StringReader(ClasspathUtil.loadResource("icd/icd10cm_tabular_2021.xml"));
+		TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
+		Icd10CmLoader loader = new Icd10CmLoader(codeSystemVersion);
+		loader.load(reader);
+
+		assertEquals("2021", codeSystemVersion.getCodeSystemVersionId());
+
+		List<TermConcept> rootConcepts = new ArrayList<>(codeSystemVersion.getConcepts());
+		assertEquals(2, rootConcepts.size());
+		assertEquals("A00", rootConcepts.get(0).getCode());
+		assertEquals("Cholera", rootConcepts.get(0).getDisplay());
+		List<String> conceptNames = rootConcepts.stream().map(t -> t.getCode()).collect(Collectors.toList());
+		assertThat(conceptNames.toString(), conceptNames, Matchers.contains("A00", "A01"));
+
+		assertEquals(3, rootConcepts.get(0).getChildCodes().size());
+		TermConcept firstChildCode = rootConcepts.get(0).getChildCodes().get(0);
+		assertEquals("A00.0", firstChildCode.getCode());
+		assertEquals("Cholera due to Vibrio cholerae 01, biovar cholerae", firstChildCode.getDisplay());
+		conceptNames = rootConcepts.get(0).getChildCodes().stream().map(t -> t.getCode()).collect(Collectors.toList());
+		assertThat(conceptNames.toString(), conceptNames, Matchers.contains("A00.0", "A00.1", "A00.9"));
+
+	}
+
+}
@@ -0,0 +1,105 @@
+<?xml version="1.0" encoding="utf-8"?>
+<ICD10CM.tabular>
+	<version>2021</version>
+	<introduction>
+		<introSection type="title">
+			<title>ICD-10-CM TABULAR LIST of DISEASES and INJURIES</title>
+		</introSection>
+	</introduction>
+	<chapter>
+		<name>1</name>
+		<desc>Certain infectious and parasitic diseases (A00-B99)</desc>
+		<sectionIndex>
+			<sectionRef first="A00" last="A09" id="A00-A09">
+				Intestinal infectious diseases
+			</sectionRef>
+		</sectionIndex>
+		<section id="A00-A09">
+			<desc>Intestinal infectious diseases (A00-A09)</desc>
+			<diag>
+				<name>A00</name>
+				<desc>Cholera</desc>
+				<diag>
+					<name>A00.0</name>
+					<desc>Cholera due to Vibrio cholerae 01, biovar cholerae</desc>
+					<inclusionTerm>
+						<note>Classical cholera</note>
+					</inclusionTerm>
+				</diag>
+				<diag>
+					<name>A00.1</name>
+					<desc>Cholera due to Vibrio cholerae 01, biovar eltor</desc>
+					<inclusionTerm>
+						<note>Cholera eltor</note>
+					</inclusionTerm>
+				</diag>
+				<diag>
+					<name>A00.9</name>
+					<desc>Cholera, unspecified</desc>
+				</diag>
+			</diag>
+			<diag>
+				<name>A01</name>
+				<desc>Typhoid and paratyphoid fevers</desc>
+				<diag>
+					<name>A01.0</name>
+					<desc>Typhoid fever</desc>
+					<inclusionTerm>
+						<note>Infection due to Salmonella typhi</note>
+					</inclusionTerm>
+					<diag>
+						<name>A01.00</name>
+						<desc>Typhoid fever, unspecified</desc>
+					</diag>
+					<diag>
+						<name>A01.01</name>
+						<desc>Typhoid meningitis</desc>
+					</diag>
+					<diag>
+						<name>A01.02</name>
+						<desc>Typhoid fever with heart involvement</desc>
+						<inclusionTerm>
+							<note>Typhoid endocarditis</note>
+							<note>Typhoid myocarditis</note>
+						</inclusionTerm>
+					</diag>
+					<diag>
+						<name>A01.03</name>
+						<desc>Typhoid pneumonia</desc>
+					</diag>
+					<diag>
+						<name>A01.04</name>
+						<desc>Typhoid arthritis</desc>
+					</diag>
+					<diag>
+						<name>A01.05</name>
+						<desc>Typhoid osteomyelitis</desc>
+					</diag>
+					<diag>
+						<name>A01.09</name>
+						<desc>Typhoid fever with other complications</desc>
+					</diag>
+				</diag>
+				<diag>
+					<name>A01.1</name>
+					<desc>Paratyphoid fever A</desc>
+				</diag>
+				<diag>
+					<name>A01.2</name>
+					<desc>Paratyphoid fever B</desc>
+				</diag>
+				<diag>
+					<name>A01.3</name>
+					<desc>Paratyphoid fever C</desc>
+				</diag>
+				<diag>
+					<name>A01.4</name>
+					<desc>Paratyphoid fever, unspecified</desc>
+					<inclusionTerm>
+						<note>Infection due to Salmonella paratyphi NOS</note>
+					</inclusionTerm>
+				</diag>
+			</diag>
+		</section>
+	</chapter>
+</ICD10CM.tabular>