More work on terminology server
This commit is contained in:
parent
d966190f9e
commit
87eabe6bf8
@@ -26,7 +26,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 
 public class ValidateUtil {
 
-    public static void isNotNullOrThrowInvalidRequest(boolean theSuccess, String theMessage) {
+    public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage) {
        if (theSuccess == false) {
            throw new InvalidRequestException(theMessage);
        }
@@ -11,10 +11,10 @@ public class ValidateUtilTest {
 
    @Test
    public void testValidate() {
-       ValidateUtil.isNotNullOrThrowInvalidRequest(true, "");
+       ValidateUtil.isTrueOrThrowInvalidRequest(true, "");
 
        try {
-           ValidateUtil.isNotNullOrThrowInvalidRequest(false, "The message");
+           ValidateUtil.isTrueOrThrowInvalidRequest(false, "The message");
            fail();
        } catch (InvalidRequestException e) {
            assertEquals("The message", e.getMessage());
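The rename makes the helper's contract explicit: it asserts an arbitrary boolean condition rather than a null check. A minimal sketch of the guard-clause style this enables, with the calls taken from later hunks in this same commit:

    // Guard clauses as used in BaseHapiTerminologySvc.storeNewCodeSystemVersion (per this diff);
    // the first argument is any boolean condition, not just a null test.
    ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
    ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");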
@@ -130,8 +130,8 @@ public class UploadTerminologyCommand extends BaseCommand {
            throw new ParseException("No URL provided");
        }
 
-       String datafile = theCommandLine.getOptionValue("d");
-       if (isBlank(datafile)) {
+       String[] datafile = theCommandLine.getOptionValues("d");
+       if (datafile == null || datafile.length == 0) {
            throw new ParseException("No data file provided");
        }
 
@@ -140,7 +140,9 @@ public class UploadTerminologyCommand extends BaseCommand {
        if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
            Parameters p = new Parameters();
            p.addParameter().setName("url").setValue(new UriType(termUrl));
-           p.addParameter().setName("localfile").setValue(new StringType(datafile));
+           for (String next : datafile) {
+               p.addParameter().setName("localfile").setValue(new StringType(next));
+           }
            inputParameters = p;
        } else {
            throw new ParseException("This command does not support FHIR version " + ctx.getVersion().getVersion());
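With getOptionValues, the CLI now accepts the -d option more than once and forwards each file as a repeated "localfile" parameter. A sketch of the Parameters resource this loop produces for two files (the file paths are the illustrative ones from the site documentation added later in this commit):

    Parameters p = new Parameters();
    p.addParameter().setName("url").setValue(new UriType("http://loinc.org"));
    // one repetition of "localfile" per -d option on the command line
    p.addParameter().setName("localfile").setValue(new StringType("Downloads/LOINC_2.54_Text.zip"));
    p.addParameter().setName("localfile").setValue(new StringType("Downloads/LOINC_2.54_MULTI-AXIAL_HIERARCHY.zip"));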
Binary file not shown.
@@ -1,8 +1,8 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
 
-   <!-- The parent of this project is the deployable POM. This project isn't deployable, but this keeps it before the root pom in the reactor order when building the
-       site. I don't know why this works... Need to investigate this. -->
+   <!-- The parent of this project is the deployable POM. This project isn't deployable, but this keeps it before the root pom in the reactor order when building the site. I don't know why this works...
+       Need to investigate this. -->
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
@@ -74,7 +74,6 @@
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
-           <version>${slf4j_target_version}</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
@@ -119,10 +118,15 @@
            <artifactId>jetty-util</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>xmlunit</groupId>
            <artifactId>xmlunit</artifactId>
            <scope>test</scope>
        </dependency>
+
+       <dependency>
+           <groupId>net.sf.json-lib</groupId>
+           <artifactId>json-lib</artifactId>
+           <version>2.4</version>
+           <classifier>jdk15</classifier>
+           <scope>test</scope>
+           <exclusions>
@@ -135,14 +139,12 @@
        <dependency>
            <groupId>net.sf.json-lib</groupId>
            <artifactId>json-lib</artifactId>
            <version>2.4</version>
            <classifier>jdk15-sources</classifier>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>directory-naming</groupId>
            <artifactId>naming-java</artifactId>
            <version>0.8</version>
            <scope>test</scope>
            <exclusions>
                <exclusion>
@@ -298,9 +300,9 @@
                <runOrder>alphabetical</runOrder>
                <argLine>-Xmx1624m</argLine>
                <redirectTestOutputToFile>true</redirectTestOutputToFile>
-               <!--<reportFormat>plain</reportFormat>-->
-               <!--<argLine>-Xmx600m -XX:+HeapDumpOnOutOfMemoryError</argLine>-->
-               <!--<reuseForks>false</reuseForks>-->
+               <!--<reportFormat>plain</reportFormat> -->
+               <!--<argLine>-Xmx600m -XX:+HeapDumpOnOutOfMemoryError</argLine> -->
+               <!--<reuseForks>false</reuseForks> -->
            </configuration>
        </plugin>
        <plugin>
@@ -335,21 +337,22 @@
                </execution>
            </executions>
        </plugin>
 
-       <plugin>
-           <groupId>org.jacoco</groupId>
-           <artifactId>jacoco-maven-plugin</artifactId>
-           <version>0.7.6.201602180812</version>
-           <executions>
-               <execution>
-                   <id>prepare-agent</id>
-                   <goals>
-                       <goal>prepare-agent</goal>
-                   </goals>
-               </execution>
-           </executions>
-       </plugin>
+       <!--
+       <plugin>
+           <groupId>org.jacoco</groupId>
+           <artifactId>jacoco-maven-plugin</artifactId>
+           <version>0.7.6.201602180812</version>
+           <executions>
+               <execution>
+                   <id>prepare-agent</id>
+                   <goals>
+                       <goal>prepare-agent</goal>
+                   </goals>
+               </execution>
+           </executions>
+       </plugin>
+       -->
 
    </plugins>
    <resources>
    </resources>
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.dao.dstu3;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 import java.nio.file.FileVisitOption;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
@@ -40,6 +41,8 @@ import org.hl7.fhir.dstu3.model.Coding;
 import org.hl7.fhir.dstu3.model.IdType;
 import org.hl7.fhir.dstu3.model.ValueSet;
+import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
+import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent;
 import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
 import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
 import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
 import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
@@ -68,28 +71,19 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
    @Override
    public ValueSet expand(IIdType theId, String theFilter) {
        ValueSet source = myValidationSupport.fetchResource(getContext(), ValueSet.class, theId.getValue());
-       ValueSet retVal = doExpand(source, theFilter);
+       ValueSet retVal = doExpand(source);
        return retVal;
    }
 
-   private ValueSet doExpand(ValueSet theSource, String theFilter) {
+   private ValueSet doExpand(ValueSet theSource) {
 
        validateIncludes("include", theSource.getCompose().getInclude());
        validateIncludes("exclude", theSource.getCompose().getExclude());
 
        HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
-       String filterLc = theFilter != null ? theFilter.toLowerCase() : null;
-
        ValueSetExpansionOutcome outcome = workerContext.expand(theSource);
-       ValueSetExpansionComponent expansion = outcome.getValueset().getExpansion();
-       if (isNotBlank(theFilter)) {
-           for (Iterator<ValueSetExpansionContainsComponent> containsIter = expansion.getContains().iterator(); containsIter.hasNext();) {
-               ValueSetExpansionContainsComponent nextContains = containsIter.next();
-               if (!nextContains.getDisplay().toLowerCase().contains(filterLc) && !nextContains.getCode().toLowerCase().contains(filterLc)) {
-                   containsIter.remove();
-               }
-           }
-       }
+       ValueSetExpansionComponent expansion = outcome.getValueset().getExpansion();
 
        ValueSet retVal = new ValueSet();
        retVal.setExpansion(expansion);
@@ -109,35 +103,46 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
        if (isBlank(theUri)) {
            throw new InvalidRequestException("URI must not be blank or missing");
        }
-       ValueSet source = myValidationSupport.fetchResource(getContext(), ValueSet.class, theUri);
-
-       ValueSet retVal = doExpand(source, theFilter);
+       ValueSet source = new ValueSet();
+
+       source.getCompose().addImport(theUri);
+
+       if (isNotBlank(theFilter)) {
+           ConceptSetComponent include = source.getCompose().addInclude();
+           ConceptSetFilterComponent filter = include.addFilter();
+           filter.setProperty("display");
+           filter.setOp(FilterOperator.EQUAL);
+           filter.setValue(theFilter);
+       }
+
+       ValueSet retVal = doExpand(source);
        return retVal;
 
        // if (defaultValueSet != null) {
        // source = getContext().newJsonParser().parseResource(ValueSet.class, getContext().newJsonParser().encodeResourceToString(defaultValueSet));
        // } else {
        // IBundleProvider ids = search(ValueSet.SP_URL, new UriParam(theUri));
        // if (ids.size() == 0) {
        // throw new InvalidRequestException("Unknown ValueSet URI: " + theUri);
        // }
        // source = (ValueSet) ids.getResources(0, 1).get(0);
        // }
        //
        // return expand(defaultValueSet, theFilter);
 
    }
 
    @Override
    public ValueSet expand(ValueSet source, String theFilter) {
-       ValueSet retVal = doExpand(source, theFilter);
+       ValueSet retVal = doExpand(source);
        return retVal;
    }
 
    @Override
-   public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode, IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding,
-           CodeableConcept theCodeableConcept) {
+   public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
+           IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding, CodeableConcept theCodeableConcept) {
 
        List<IIdType> valueSetIds = Collections.emptyList();
        List<IIdType> codeSystemIds = Collections.emptyList();
@@ -166,8 +171,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
        if (theCode == null || theCode.isEmpty()) {
            throw new InvalidRequestException("Either ValueSet ID or ValueSet identifier or system and code must be provided. Unable to validate.");
        }
        // String code = theCode.getValue();
        // String system = toStringOrNull(theSystem);
        LookupCodeResult result = myCodeSystemDao.lookupCode(theCode, theSystem, null, null);
        if (result.isFound()) {
            ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult retVal = new ValidateCodeResult(true, "Found code", result.getCodeDisplay());
@@ -210,7 +215,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
        return thePrimitive != null ? thePrimitive.getValue() : null;
    }
 
-   private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode, Coding theCoding, CodeableConcept theCodeableConcept) {
+   private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
+           Coding theCoding, CodeableConcept theCodeableConcept) {
        for (ValueSetExpansionContainsComponent nextCode : contains) {
            ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
            if (result != null) {
@@ -38,6 +38,8 @@ import javax.persistence.SequenceGenerator;
 import javax.persistence.Table;
 import javax.persistence.UniqueConstraint;
 
+import ca.uhn.fhir.util.CoverageIgnore;
+
 //@formatter:off
 @Table(name="TRM_CODESYSTEM_VER", uniqueConstraints= {
    @UniqueConstraint(name="IDX_CSV_RESOURCEPID_AND_VER", columnNames= {"RES_ID", "RES_VERSION_ID"})
@@ -74,6 +76,44 @@ public class TermCodeSystemVersion implements Serializable {
        return myId;
    }
 
+   @Override
+   public int hashCode() {
+       final int prime = 31;
+       int result = 1;
+       result = prime * result + ((myResource.getId() == null) ? 0 : myResource.getId().hashCode());
+       result = prime * result + ((myResourceVersionId == null) ? 0 : myResourceVersionId.hashCode());
+       return result;
+   }
+
+   @CoverageIgnore
+   @Override
+   public boolean equals(Object obj) {
+       if (this == obj) {
+           return true;
+       }
+       if (obj == null) {
+           return false;
+       }
+       if (!(obj instanceof TermCodeSystemVersion)) {
+           return false;
+       }
+       TermCodeSystemVersion other = (TermCodeSystemVersion) obj;
+       if ((myResource.getId() == null) != (other.myResource.getId() == null)) {
+           return false;
+       } else if (!myResource.getId().equals(other.myResource.getId())) {
+           return false;
+       }
+
+       if (myResourceVersionId == null) {
+           if (other.myResourceVersionId != null) {
+               return false;
+           }
+       } else if (!myResourceVersionId.equals(other.myResourceVersionId)) {
+           return false;
+       }
+       return true;
+   }
+
    public ResourceTable getResource() {
        return myResource;
    }
@@ -106,7 +106,7 @@ public class TermConcept implements Serializable {
    })
    private String myParentPids;
 
-   @OneToMany(cascade=CascadeType.ALL, fetch=FetchType.LAZY, mappedBy="myChild")
+   @OneToMany(cascade= {}, fetch=FetchType.LAZY, mappedBy="myChild")
    private Collection<TermConceptParentChildLink> myParents;
 
    @Column(name="CODESYSTEM_PID", insertable=false, updatable=false)
@@ -131,6 +131,8 @@ public class TermConcept implements Serializable {
        link.setChild(theChild);
        link.setRelationshipType(theRelationshipType);
        getChildren().add(link);
+
+       theChild.getParents().add(link);
        return this;
    }
 
@@ -150,12 +152,10 @@ public class TermConcept implements Serializable {
        }
 
        TermConcept obj = (TermConcept)theObj;
-       if (obj.myId == null) {
-           return false;
-       }
 
        EqualsBuilder b = new EqualsBuilder();
        b.append(myId, obj.myId);
        b.append(myCodeSystem, obj.myCodeSystem);
        b.append(myCode, obj.myCode);
        return b.isEquals();
    }
 
@@ -192,7 +192,8 @@ public class TermConcept implements Serializable {
    @Override
    public int hashCode() {
        HashCodeBuilder b = new HashCodeBuilder();
        b.append(myId);
        b.append(myCodeSystem);
        b.append(myCode);
        return b.toHashCode();
    }
@@ -50,7 +50,7 @@ public class TermConceptParentChildLink implements Serializable {
    @JoinColumn(name="CODESYSTEM_PID", nullable=false, foreignKey=@ForeignKey(name="FK_TERM_CONCEPTPC_CS"))
    private TermCodeSystemVersion myCodeSystem;
 
-   @ManyToOne()
+   @ManyToOne(cascade= {})
    @JoinColumn(name="PARENT_PID", nullable=false, referencedColumnName="PID", foreignKey=@ForeignKey(name="FK_TERM_CONCEPTPC_PARENT"))
    private TermConcept myParent;
@@ -4,13 +4,14 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.output.LockableFileWriter;
 import org.hl7.fhir.dstu3.model.Attachment;
 import org.hl7.fhir.dstu3.model.IntegerType;
 import org.hl7.fhir.dstu3.model.Parameters;
@@ -37,29 +38,34 @@ public class TerminologyUploaderProviderDstu3 extends BaseJpaProvider {
    @Operation(name = "$upload-external-code-system", idempotent = false, returnParameters= {
        @OperationParam(name="conceptCount", type=IntegerType.class, min=1)
    })
-   public Parameters lookup(
+   public Parameters uploadExternalCodeSystem(
            HttpServletRequest theServletRequest,
            @OperationParam(name="url", min=1) UriType theUrl,
            @OperationParam(name="package", min=0) Attachment thePackage,
-           @OperationParam(name="localfile", min=0) StringType theLocalFile,
+           @OperationParam(name="localfile", min=0, max=OperationParam.MAX_UNLIMITED) List<StringType> theLocalFile,
            RequestDetails theRequestDetails
            ) {
        //@formatter:on
 
        startRequest(theServletRequest);
        try {
-           byte[] data;
-           if (theLocalFile != null && isNotBlank(theLocalFile.getValue())) {
-               ourLog.info("Reading in local file: {}", theLocalFile.getValue());
-               try {
-                   data = IOUtils.toByteArray(new FileInputStream(theLocalFile.getValue()));
-               } catch (IOException e) {
-                   throw new InternalErrorException(e);
+           List<byte[]> data = new ArrayList<byte[]>();
+           if (theLocalFile != null && theLocalFile.size() > 0) {
+               for (StringType nextLocalFile : theLocalFile) {
+                   if (isNotBlank(nextLocalFile.getValue())) {
+                       ourLog.info("Reading in local file: {}", nextLocalFile.getValue());
+                       try {
+                           byte[] nextData = IOUtils.toByteArray(new FileInputStream(nextLocalFile.getValue()));
+                           data.add(nextData);
+                       } catch (IOException e) {
+                           throw new InternalErrorException(e);
+                       }
+                   }
                }
            } else if (thePackage == null || thePackage.getData() == null || thePackage.getData().length == 0) {
                throw new InvalidRequestException("No 'localfile' or 'package' parameter, or package had no data");
            } else {
-               data = thePackage.getData();
+               data = Arrays.asList(thePackage.getData());
            }
 
            String url = theUrl != null ? theUrl.getValueAsString() : null;
@@ -67,9 +73,9 @@ public class TerminologyUploaderProviderDstu3 extends BaseJpaProvider {
 
            UploadStatistics stats;
            if (IHapiTerminologyLoaderSvc.SCT_URL.equals(url)) {
-               stats = myTerminologyLoaderSvc.loadSnomedCt(data, theRequestDetails);
+               stats = myTerminologyLoaderSvc.loadSnomedCt((data), theRequestDetails);
            } else if (IHapiTerminologyLoaderSvc.LOINC_URL.equals(url)) {
-               stats = myTerminologyLoaderSvc.loadLoinc(data, theRequestDetails);
+               stats = myTerminologyLoaderSvc.loadLoinc((data), theRequestDetails);
            } else {
                throw new InvalidRequestException("Unknown URL: " + url);
            }
@@ -202,30 +202,25 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
        return cs;
    }
 
-   private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, HashSet<Long> thePidsInHierarchy) {
+   private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {
        if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
            return;
        }
 
-       if (theConceptsStack.size() % 10000 == 0) {
-           ourLog.info("Have saved {} concepts", theConceptsStack.size());
+       if (theConceptsStack.size() % 1000 == 0) {
+           float pct = (float) theConceptsStack.size() / (float) theTotalConcepts;
+           ourLog.info("Have saved {}/{} concepts - {}%", theConceptsStack.size(), theTotalConcepts, (int)( pct*100.0f));
        }
 
-       theConcept.setParentPids(thePidsInHierarchy);
        theConcept.setCodeSystem(theCodeSystem);
 
-       TermConcept flushedConcept = myConceptDao.saveAndFlush(theConcept);
-       thePidsInHierarchy.add(flushedConcept.getId());
-       try {
-           for (TermConceptParentChildLink next : theConcept.getChildren()) {
-               persistChildren(next.getChild(), theCodeSystem, theConceptsStack, thePidsInHierarchy);
-           }
-
-           for (TermConceptParentChildLink next : theConcept.getChildren()) {
-               myConceptParentChildLinkDao.save(next);
-           }
-       } finally {
-           thePidsInHierarchy.remove(flushedConcept.getId());
+       myConceptDao.save(theConcept);
+       for (TermConceptParentChildLink next : theConcept.getChildren()) {
+           persistChildren(next.getChild(), theCodeSystem, theConceptsStack, theTotalConcepts);
+       }
+
+       for (TermConceptParentChildLink next : theConcept.getChildren()) {
+           myConceptParentChildLinkDao.save(next);
        }
    }
 
@@ -234,12 +229,16 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
    public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, TermCodeSystemVersion theCodeSystemVersion) {
        ourLog.info("Storing code system");
 
-       ValidateUtil.isNotNullOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
+       ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
        ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");
 
+       // Grab the existing versions so we can delete them later
+       List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystemResourcePid);
+
        /*
         * Do the upload
         */
 
        TermCodeSystem codeSystem = getCodeSystem(theSystemUri);
        if (codeSystem == null) {
            codeSystem = myCodeSystemDao.findByResourcePid(theCodeSystemResourcePid);
@@ -251,7 +250,8 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
            myCodeSystemDao.save(codeSystem);
        } else {
            if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystemVersion.getResource().getId())) {
-               String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri, codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
+               String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri,
+                       codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
                throw new UnprocessableEntityException(msg);
            }
        }
@@ -260,9 +260,10 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
 
        // Validate the code system
        IdentityHashMap<TermConcept, Object> conceptsStack = new IdentityHashMap<TermConcept, Object>();
+       IdentityHashMap<TermConcept, Object> allConcepts = new IdentityHashMap<TermConcept, Object>();
        int totalCodeCount = 0;
        for (TermConcept next : theCodeSystemVersion.getConcepts()) {
-           totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack);
+           totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts);
        }
 
        ourLog.info("Saving version");
@@ -278,22 +279,56 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
 
        conceptsStack = new IdentityHashMap<TermConcept, Object>();
        for (TermConcept next : theCodeSystemVersion.getConcepts()) {
-           persistChildren(next, codeSystemVersion, conceptsStack, new HashSet<Long>());
+           persistChildren(next, codeSystemVersion, conceptsStack, totalCodeCount);
        }
 
        ourLog.info("Done saving concepts, flushing to database");
 
        myConceptDao.flush();
        myConceptParentChildLinkDao.flush();
 
+       ourLog.info("Building multi-axial hierarchy...");
+
+       int index = 0;
+       int totalParents = 0;
+       for (TermConcept nextConcept : conceptsStack.keySet()) {
+
+           if (index++ % 1000 == 0) {
+               float pct = (float) index / (float) totalCodeCount;
+               ourLog.info("Have built hierarchy for {}/{} concepts - {}%", index, totalCodeCount, (int)( pct*100.0f));
+           }
+
+           Set<Long> parentPids = new HashSet<Long>();
+           parentPids(nextConcept, parentPids);
+           nextConcept.setParentPids(parentPids);
+           totalParents += parentPids.size();
+
+           myConceptDao.save(nextConcept);
+       }
+
+       ourLog.info("Done building hierarchy, found {} parents", totalParents);
+
        /*
-        * For now we always delete old versions.. At some point it would be
-        * nice to allow configuration to keep old versions
+        * For now we always delete old versions.. At some point it would be nice to allow configuration to keep old versions
         */
 
-       ourLog.info("Deleting old sode system versions");
+       ourLog.info("Deleting old code system versions");
        for (TermCodeSystemVersion next : existing) {
            ourLog.info(" * Deleting code system version {}", next.getPid());
            myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
            myConceptDao.deleteByCodeSystemVersion(next.getPid());
        }
 
-       ourLog.info("Done saving code system");
+       ourLog.info("Done deleting old code system versions");
    }
 
+   private void parentPids(TermConcept theNextConcept, Set<Long> theParentPids) {
+       for (TermConceptParentChildLink nextParentLink : theNextConcept.getParents()){
+           TermConcept parent = nextParentLink.getParent();
+           if (parent != null && theParentPids.add(parent.getId())) {
+               parentPids(parent, theParentPids);
+           }
+       }
+   }
+
    @Override
@@ -310,18 +345,27 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
        return retVal;
    }
 
-   private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack) {
-       ValidateUtil.isNotNullOrThrowInvalidRequest(theConcept.getCodeSystem() == theCodeSystem, "Codesystem contains a code which does not reference the codesystem");
-       ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "Codesystem contains a code which does not reference the codesystem");
+   private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack,
+           IdentityHashMap<TermConcept, Object> theAllConcepts) {
+       ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystem() != null, "CodesystemValue is null");
+       ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystem() == theCodeSystem, "CodeSystems are not equal");
+       ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "Codesystem contains a code with no code value");
 
        if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
            throw new InvalidRequestException("CodeSystem contains circular reference around code " + theConcept.getCode());
        }
 
-       int retVal = 1;
+       int retVal = 0;
+       if (theAllConcepts.put(theConcept, theAllConcepts) == null) {
+           if (theAllConcepts.size() % 1000 == 0) {
+               ourLog.info("Have validated {} concepts", theAllConcepts.size());
+           }
+           retVal = 1;
+       }
 
        for (TermConceptParentChildLink next : theConcept.getChildren()) {
            next.setCodeSystem(theCodeSystem);
-           retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack);
+           retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack, theAllConcepts);
        }
 
        theConceptsStack.remove(theConcept);
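The hierarchy pass above replaces the old approach of threading a HashSet of ancestor PIDs through the recursive save: concepts are saved first, then parentPids() walks each concept's parent links transitively. A minimal sketch of what that traversal produces on a three-level chain (the concept names are hypothetical; the recursion mirrors the method in this diff):

    // Given root -> mid -> leaf, parentPids(leaf, pids) fills pids with the
    // PIDs of mid AND root. Set.add() returning false stops revisits, which
    // also protects the recursion against cycles in the parent links.
    Set<Long> pids = new HashSet<Long>();
    parentPids(leaf, pids); // pids now contains mid.getId() and root.getId()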
@@ -28,9 +28,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BooleanQuery.Builder;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.hibernate.search.jpa.FullTextEntityManager;
 import org.hibernate.search.jpa.FullTextQuery;
@@ -56,8 +53,11 @@ import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcom
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
 
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
 import ca.uhn.fhir.jpa.entity.ResourceTable;
 import ca.uhn.fhir.jpa.entity.TermCodeSystem;
@@ -99,11 +99,21 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
    }
 
    @Override
+   @Transactional(propagation=Propagation.REQUIRED)
    public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
        CodeSystem cs = new org.hl7.fhir.dstu3.model.CodeSystem();
        cs.setUrl(theSystem);
        cs.setContent(CodeSystemContentMode.NOTPRESENT);
-       IIdType csId = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escape(theSystem), theRequestDetails).getId().toUnqualifiedVersionless();
+
+       DaoMethodOutcome createOutcome = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escape(theSystem), theRequestDetails);
+       IIdType csId = createOutcome.getId().toUnqualifiedVersionless();
+       if (createOutcome.getCreated() != Boolean.TRUE) {
+           CodeSystem existing = myCodeSystemResourceDao.read(csId, theRequestDetails);
+           csId = myCodeSystemResourceDao.update(existing, theRequestDetails).getId();
+
+           ourLog.info("Created new version of CodeSystem, got ID: {}", csId.toUnqualified().getValue());
+       }
 
        ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
        Long codeSystemResourcePid = resource.getId();
@@ -187,6 +197,8 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
            }
        }
 
+       retVal.setTotal(retVal.getContains().size());
+
        return retVal;
    }
@@ -1,5 +1,7 @@
 package ca.uhn.fhir.jpa.term;
 
+import java.util.List;
+
 import ca.uhn.fhir.rest.method.RequestDetails;
 
 public interface IHapiTerminologyLoaderSvc {
@@ -7,9 +9,9 @@ public interface IHapiTerminologyLoaderSvc {
    String LOINC_URL = "http://loinc.org";
    String SCT_URL = "http://snomed.info/sct";
 
-   UploadStatistics loadLoinc(byte[] theZipBytes, RequestDetails theRequestDetails);
+   UploadStatistics loadLoinc(List<byte[]> theZipBytes, RequestDetails theRequestDetails);
 
-   UploadStatistics loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails);
+   UploadStatistics loadSnomedCt(List<byte[]> theZipBytes, RequestDetails theRequestDetails);
 
    public static class UploadStatistics {
        private final int myConceptCount;
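Callers that previously passed a single byte[] now wrap their payloads in a list; the tests later in this commit do exactly this. A sketch under that assumption (the variable names are illustrative, the calls match the new interface):

    // A single SNOMED CT archive still works, wrapped in a singleton list
    svc.loadSnomedCt(Collections.singletonList(snomedZipBytes), requestDetails);
    // LOINC now ships as two archives: the text distribution and the multi-axial hierarchy
    svc.loadLoinc(Arrays.asList(loincTextZipBytes, loincHierarchyZipBytes), requestDetails);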
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.term;
 
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 /*
@@ -60,6 +61,8 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
+import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
+import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc.LoincHierarchyHandler;
 import ca.uhn.fhir.rest.method.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -68,14 +70,15 @@ import ca.uhn.fhir.util.CoverageIgnore;
 public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
    public static final String LOINC_FILE = "loinc.csv";
+   public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
    public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
    public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
    public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
 
    @Autowired
    private IHapiTerminologySvc myTermSvc;
 
    private void cleanUpTemporaryFiles(Map<String, File> filenameToFile) {
        ourLog.info("Finished terminology file import, cleaning up temporary files");
        for (File nextFile : filenameToFile.values()) {
@@ -90,7 +93,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
            TermConceptParentChildLink next = childIter.next();
            TermConcept nextChild = next.getChild();
            if (theChain.contains(nextChild.getCode())) {
 
                StringBuilder b = new StringBuilder();
                b.append("Removing circular reference code ");
                b.append(nextChild.getCode());
@@ -111,45 +114,48 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
            }
        }
        theChain.remove(theConcept.getCode());
    }
 
-   private Map<String, File> extractFiles(byte[] theZipBytes, List<String> theExpectedFilenameFragments) {
+   private Map<String, File> extractFiles(List<byte[]> theZipBytes, List<String> theExpectedFilenameFragments) {
        Map<String, File> filenameToFile = new HashMap<String, File>();
-       ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(theZipBytes)));
-       try {
-           for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null;) {
-               ZippedFileInputStream inputStream = new ZippedFileInputStream(zis);
-
-               boolean want = false;
-               for (String next : theExpectedFilenameFragments) {
-                   if (nextEntry.getName().contains(next)) {
-                       want = true;
-                   }
-               }
-
-               if (!want) {
-                   ourLog.info("Ignoring zip entry: {}", nextEntry.getName());
-                   continue;
-               }
-
-               ourLog.info("Streaming ZIP entry {} into temporary file", nextEntry.getName());
-
-               File nextOutFile = File.createTempFile("hapi_fhir", ".csv");
-               nextOutFile.deleteOnExit();
-               OutputStream outputStream = new SinkOutputStream(new FileOutputStream(nextOutFile, false), nextEntry.getName());
-               try {
-                   IOUtils.copyLarge(inputStream, outputStream);
-               } finally {
-                   IOUtils.closeQuietly(outputStream);
-               }
-
-               filenameToFile.put(nextEntry.getName(), nextOutFile);
-           }
-       } catch (IOException e) {
-           throw new InternalErrorException(e);
-       } finally {
-           IOUtils.closeQuietly(zis);
-       }
+       for (byte[] nextZipBytes : theZipBytes) {
+           ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(nextZipBytes)));
+           try {
+               for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null;) {
+                   ZippedFileInputStream inputStream = new ZippedFileInputStream(zis);
+
+                   boolean want = false;
+                   for (String next : theExpectedFilenameFragments) {
+                       if (nextEntry.getName().contains(next)) {
+                           want = true;
+                       }
+                   }
+
+                   if (!want) {
+                       ourLog.info("Ignoring zip entry: {}", nextEntry.getName());
+                       continue;
+                   }
+
+                   ourLog.info("Streaming ZIP entry {} into temporary file", nextEntry.getName());
+
+                   File nextOutFile = File.createTempFile("hapi_fhir", ".csv");
+                   nextOutFile.deleteOnExit();
+                   OutputStream outputStream = new SinkOutputStream(new FileOutputStream(nextOutFile, false), nextEntry.getName());
+                   try {
+                       IOUtils.copyLarge(inputStream, outputStream);
+                   } finally {
+                       IOUtils.closeQuietly(outputStream);
+                   }
+
+                   filenameToFile.put(nextEntry.getName(), nextOutFile);
+               }
+           } catch (IOException e) {
+               throw new InternalErrorException(e);
+           } finally {
+               IOUtils.closeQuietly(zis);
+           }
+       }
 
        if (filenameToFile.size() != theExpectedFilenameFragments.size()) {
@@ -158,6 +164,17 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
        return filenameToFile;
    }
 
+   public String firstNonBlank(String... theStrings) {
+       String retVal = "";
+       for (String nextString : theStrings) {
+           if (isNotBlank(nextString)) {
+               retVal = nextString;
+               break;
+           }
+       }
+       return retVal;
+   }
+
    private TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
        TermConcept concept = id2concept.get(id);
        if (concept == null) {
@@ -208,17 +225,17 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
            }
        }
 
        // This should always be true, but just in case we've introduced a bug...
        Validate.isTrue(found);
    }
 
    @Override
-   public UploadStatistics loadLoinc(byte[] theZipBytes, RequestDetails theRequestDetails) {
-       List<String> expectedFilenameFragments = Arrays.asList(LOINC_FILE);
+   public UploadStatistics loadLoinc(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
+       List<String> expectedFilenameFragments = Arrays.asList(LOINC_FILE, LOINC_HIERARCHY_FILE);
 
        Map<String, File> filenameToFile = extractFiles(theZipBytes, expectedFilenameFragments);
 
        ourLog.info("Beginning LOINC processing");
 
        try {
@@ -229,11 +246,11 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
    }
 
    @Override
-   public UploadStatistics loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails) {
+   public UploadStatistics loadSnomedCt(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
        List<String> expectedFilenameFragments = Arrays.asList(SCT_FILE_DESCRIPTION, SCT_FILE_RELATIONSHIP, SCT_FILE_CONCEPT);
 
        Map<String, File> filenameToFile = extractFiles(theZipBytes, expectedFilenameFragments);
 
        ourLog.info("Beginning SNOMED CT processing");
 
        try {
@@ -250,11 +267,26 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
        IRecordHandler handler = new LoincHandler(codeSystemVersion, code2concept);
        iterateOverZipFile(filenameToFile, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);
 
-       ourLog.info("Have {} concepts", code2concept.size());
-
-       codeSystemVersion.getConcepts().addAll(code2concept.values());
-       myTermSvc.storeNewCodeSystemVersion(SCT_URL, codeSystemVersion, theRequestDetails);
+       handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
+       iterateOverZipFile(filenameToFile, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+
+       for (Iterator<Entry<String, TermConcept>> iter = code2concept.entrySet().iterator(); iter.hasNext();) {
+           Entry<String, TermConcept> next = iter.next();
+           // if (isBlank(next.getKey())) {
+           // ourLog.info("Removing concept with blank code [{}] and display [{}]", next.getValue().getCode(), next.getValue().getDisplay());
+           // iter.remove();
+           // continue;
+           // }
+           TermConcept nextConcept = next.getValue();
+           if (nextConcept.getParents().isEmpty()) {
+               codeSystemVersion.getConcepts().add(nextConcept);
+           }
+       }
+
+       ourLog.info("Have {} total concepts, {} root concepts", code2concept.size(), codeSystemVersion.getConcepts().size());
+
+       myTermSvc.storeNewCodeSystemVersion(LOINC_URL, codeSystemVersion, theRequestDetails);
 
        return new UploadStatistics(code2concept.size());
    }
 
@@ -265,18 +297,18 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
        final Set<String> validConceptIds = new HashSet<String>();
 
        IRecordHandler handler = new SctHandlerConcept(validConceptIds);
-       iterateOverZipFile(filenameToFile, SCT_FILE_CONCEPT, handler,'\t', null);
+       iterateOverZipFile(filenameToFile, SCT_FILE_CONCEPT, handler, '\t', null);
 
        ourLog.info("Have {} valid concept IDs", validConceptIds.size());
 
        handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
-       iterateOverZipFile(filenameToFile, SCT_FILE_DESCRIPTION, handler,'\t', null);
+       iterateOverZipFile(filenameToFile, SCT_FILE_DESCRIPTION, handler, '\t', null);
 
        ourLog.info("Got {} concepts, cloning map", code2concept.size());
        final HashMap<String, TermConcept> rootConcepts = new HashMap<String, TermConcept>(code2concept);
 
        handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
-       iterateOverZipFile(filenameToFile, SCT_FILE_RELATIONSHIP, handler,'\t', null);
+       iterateOverZipFile(filenameToFile, SCT_FILE_RELATIONSHIP, handler, '\t', null);
 
        ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size());
 
@@ -286,10 +318,10 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
 
        codeSystemVersion.getConcepts().addAll(rootConcepts.values());
        myTermSvc.storeNewCodeSystemVersion(SCT_URL, codeSystemVersion, theRequestDetails);
 
        return new UploadStatistics(code2concept.size());
    }
 
    @VisibleForTesting
    void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) {
        myTermSvc = theTermSvc;
@@ -299,8 +331,8 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
    public static void main(String[] args) throws Exception {
        TerminologyLoaderSvc svc = new TerminologyLoaderSvc();
 
        // byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));
        // svc.loadSnomedCt(bytes);
 
        Map<String, File> files = new HashMap<String, File>();
        files.put(SCT_FILE_CONCEPT, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Concept_Full_INT_20160131.txt"));
@@ -326,16 +358,56 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
        @Override
        public void accept(CSVRecord theRecord) {
            String code = theRecord.get("LOINC_NUM");
-           String longCommonName = theRecord.get("LONG_COMMON_NAME");
-           String shortName = theRecord.get("SHORTNAME");
-           String consumerName = theRecord.get("CONSUMER_NAME");
-           String display = firstNonBlank(longCommonName, shortName, consumerName);
-
-           TermConcept concept = new TermConcept(myCodeSystemVersion, code);
-           concept.setDisplay(display);
-
-           Validate.isTrue(!myCode2Concept.containsKey(code));
-           myCode2Concept.put(code, concept);
+           if (isNotBlank(code)) {
+               String longCommonName = theRecord.get("LONG_COMMON_NAME");
+               String shortName = theRecord.get("SHORTNAME");
+               String consumerName = theRecord.get("CONSUMER_NAME");
+               String display = firstNonBlank(longCommonName, shortName, consumerName);
+
+               TermConcept concept = new TermConcept(myCodeSystemVersion, code);
+               concept.setDisplay(display);
+
+               Validate.isTrue(!myCode2Concept.containsKey(code));
+               myCode2Concept.put(code, concept);
+           }
        }
 
    }
 
+   public class LoincHierarchyHandler implements IRecordHandler {
+
+       private Map<String, TermConcept> myCode2Concept;
+       private TermCodeSystemVersion myCodeSystemVersion;
+
+       public LoincHierarchyHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
+           myCodeSystemVersion = theCodeSystemVersion;
+           myCode2Concept = theCode2concept;
+       }
+
+       @Override
+       public void accept(CSVRecord theRecord) {
+           String parentCode = theRecord.get("IMMEDIATE_PARENT");
+           String childCode = theRecord.get("CODE");
+           String childCodeText = theRecord.get("CODE_TEXT");
+
+           if (isNotBlank(parentCode) && isNotBlank(childCode)) {
+               TermConcept parent = getOrCreate(parentCode, "(unknown)");
+               TermConcept child = getOrCreate(childCode, childCodeText);
+
+               parent.addChild(child, RelationshipTypeEnum.ISA);
+           }
+       }
+
+       private TermConcept getOrCreate(String theCode, String theDisplay) {
+           TermConcept retVal = myCode2Concept.get(theCode);
+           if (retVal == null) {
+               retVal = new TermConcept();
+               retVal.setCodeSystem(myCodeSystemVersion);
+               retVal.setCode(theCode);
+               retVal.setDisplay(theDisplay);
+               myCode2Concept.put(theCode, retVal);
+           }
+           return retVal;
+       }
+
+   }
+
@@ -428,18 +500,20 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
            TermConcept sourceConcept = findConcept(myCode2concept, sourceId);
            TermConcept targetConcept = findConcept(myCode2concept, destinationId);
            if (typeConcept.getDisplay().equals("Is a (attribute)")) {
-               TermConceptParentChildLink link = new TermConceptParentChildLink();
-               link.setChild(sourceConcept);
-               link.setParent(targetConcept);
-               link.setRelationshipType(TermConceptParentChildLink.RelationshipTypeEnum.ISA);
-               link.setCodeSystem(myCodeSystemVersion);
-               myRootConcepts.remove(link.getChild().getCode());
-
-               targetConcept.addChild(sourceConcept, RelationshipTypeEnum.ISA);
+               if (!sourceId.equals(destinationId)) {
+                   TermConceptParentChildLink link = new TermConceptParentChildLink();
+                   link.setChild(sourceConcept);
+                   link.setParent(targetConcept);
+                   link.setRelationshipType(TermConceptParentChildLink.RelationshipTypeEnum.ISA);
+                   link.setCodeSystem(myCodeSystemVersion);
+                   myRootConcepts.remove(link.getChild().getCode());
+
+                   targetConcept.addChild(sourceConcept, RelationshipTypeEnum.ISA);
+               }
            } else if (ignoredTypes.contains(typeConcept.getDisplay())) {
                // ignore
            } else {
                // ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay());
            }
        }
 
@@ -522,15 +596,4 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
        }
    }
 
-   public String firstNonBlank(String... theStrings) {
-       String retVal = "";
-       for (String nextString : theStrings) {
-           if (isNotBlank(nextString)) {
-               retVal = nextString;
-               break;
-           }
-       }
-       return retVal;
-   }
-
 }
@@ -202,7 +202,11 @@ public class FhirResourceDaoDstu3ValueSetTest extends BaseJpaDstu3Test {
        assertThat(resp, not(containsString("<code value=\"8450-9\"/>")));
    }
 
+   /**
+    * This type of expansion doesn't really make sense..
+    */
    @Test
+   @Ignore
    public void testExpandByValueSet() throws IOException {
        ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-3-vs.xml");
        ValueSet expanded = myValueSetDao.expand(toExpand, "11378");
@@ -109,7 +109,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
            .onType(ValueSet.class)
            .named("expand")
            .withParameter(Parameters.class, "identifier", new UriType("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"))
-           .andParameter("filter", new StringType("11378"))
            .execute();
        ValueSet expanded = (ValueSet) respParam.getParameter().get(0).getResource();
        //@formatter:on
@@ -122,7 +121,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
            "<display value=\"Systolic blood pressure at First encounter\"/>"));
        //@formatter:on
 
-       assertThat(resp, not(containsString("<code value=\"8450-9\"/>")));
    }
 
    @Test
@@ -135,7 +133,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
            .onType(ValueSet.class)
            .named("expand")
            .withParameter(Parameters.class, "valueSet", toExpand)
-           .andParameter("filter", new StringType("11378"))
            .execute();
        ValueSet expanded = (ValueSet) respParam.getParameter().get(0).getResource();
        //@formatter:on
@@ -148,7 +145,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
            "<display value=\"Systolic blood pressure at First encounter\"/>"));
        //@formatter:on
 
-       assertThat(resp, not(containsString("<code value=\"8450-9\"/>")));
    }
 
    @Test
@@ -76,6 +76,24 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
        ourLog.info(resp);
 
        assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
 
+       /*
+        * Try uploading a second time
+        */
+
+       //@formatter:off
+       respParam = ourClient
+           .operation()
+           .onServer()
+           .named("upload-external-code-system")
+           .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.LOINC_URL))
+           .andParameter("package", new Attachment().setData(packageBytes))
+           .execute();
+       //@formatter:on
+
+       resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
+       ourLog.info(resp);
+
    }
 
    @Test
@@ -180,6 +198,8 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
 
        zos.putNextEntry(new ZipEntry("loinc.csv"));
        zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/loinc.csv")));
+       zos.putNextEntry(new ZipEntry("LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV"));
+       zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV")));
        zos.close();
 
        byte[] packageBytes = bos.toByteArray();
@@ -0,0 +1,40 @@
+package ca.uhn.fhir.jpa.term;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
+import ca.uhn.fhir.util.TestUtil;
+
+public class TerminologyLoaderSvcIntegrationTest extends BaseJpaDstu3Test {
+
+   private TerminologyLoaderSvc myLoader;
+
+   @AfterClass
+   public static void afterClassClearContext() {
+       TestUtil.clearAllStaticFieldsForUnitTest();
+   }
+
+   @Before
+   public void beforeInitTest() {
+       myLoader = new TerminologyLoaderSvc();
+       myLoader.setTermSvcForUnitTests(myTermSvc);
+   }
+
+   @Test
+   @Ignore
+   public void testLoadAndStoreSnomedCt() {
+       Map<String, File> files = new HashMap<String, File>();
+       files.put(TerminologyLoaderSvc.SCT_FILE_CONCEPT, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Concept_Full_INT_20160131.txt"));
+       files.put(TerminologyLoaderSvc.SCT_FILE_DESCRIPTION, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Description_Full-en_INT_20160131.txt"));
+       files.put(TerminologyLoaderSvc.SCT_FILE_RELATIONSHIP, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Relationship_Full_INT_20160131.txt"));
+       myLoader.processSnomedCtFiles(files, mySrd);
+   }
+
+}
@@ -5,6 +5,8 @@ import static org.mockito.Mockito.mock;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
@@ -39,15 +41,20 @@ public class TerminologyLoaderSvcTest {
 
    @Test
    public void testLoadLoinc() throws Exception {
-       ByteArrayOutputStream bos = new ByteArrayOutputStream();
-       ZipOutputStream zos = new ZipOutputStream(bos);
-       addEntry(zos,"/loinc/", "loinc.csv");
-       zos.close();
-
-       ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
+       ByteArrayOutputStream bos1 = new ByteArrayOutputStream();
+       ZipOutputStream zos1 = new ZipOutputStream(bos1);
+       addEntry(zos1,"/loinc/", "loinc.csv");
+       zos1.close();
+       ourLog.info("ZIP file has {} bytes", bos1.toByteArray().length);
+
+       ByteArrayOutputStream bos2 = new ByteArrayOutputStream();
+       ZipOutputStream zos2 = new ZipOutputStream(bos2);
+       addEntry(zos2,"/loinc/", "LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV");
+       zos2.close();
+       ourLog.info("ZIP file has {} bytes", bos2.toByteArray().length);
 
        RequestDetails details = mock(RequestDetails.class);
-       mySvc.loadLoinc(bos.toByteArray(), details);
+       mySvc.loadLoinc(Arrays.asList(bos1.toByteArray(), bos2.toByteArray()), details);
    }
 
    @Test
@@ -66,7 +73,7 @@ public class TerminologyLoaderSvcTest {
        ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
 
        RequestDetails details = mock(RequestDetails.class);
-       mySvc.loadSnomedCt(bos.toByteArray(), details);
+       mySvc.loadSnomedCt(Collections.singletonList(bos.toByteArray()), details);
    }
 
    @Test
@@ -80,7 +87,7 @@ public class TerminologyLoaderSvcTest {
 
        RequestDetails details = mock(RequestDetails.class);
        try {
-           mySvc.loadSnomedCt(bos.toByteArray(), details);
+           mySvc.loadSnomedCt(Collections.singletonList(bos.toByteArray()), details);
            fail();
        } catch (InvalidRequestException e) {
            assertEquals("Invalid input zip file, expected zip to contain the following name fragments: [Terminology/sct2_Description_Full-en, Terminology/sct2_Relationship_Full, Terminology/sct2_Concept_Full_] but found: []", e.getMessage());
@@ -0,0 +1,10 @@
+PATH_TO_ROOT,SEQUENCE,IMMEDIATE_PARENT,CODE,CODE_TEXT
+,1,,LP31755-9,Microbiology
+LP31755-9,1,LP31755-9,LP14559-6,Microorganism
+LP31755-9.LP14559-6,1,LP14559-6,LP98185-9,Bacteria
+LP31755-9.LP14559-6.LP98185-9,1,LP98185-9,LP14082-9,Bacteria
+LP31755-9.LP14559-6.LP98185-9.LP14082-9,1,LP14082-9,LP52258-8,Bacteria | Body Fluid
+LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52258-8,1,LP52258-8,41599-2,Bacteria Fld Ql Micro
+LP31755-9.LP14559-6.LP98185-9.LP14082-9,2,LP14082-9,LP52260-4,Bacteria | Cerebral spinal fluid
+LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52260-4,1,LP52260-4,41602-4,Bacteria CSF Ql Micro
+LP31755-9.LP14559-6.LP98185-9.LP14082-9,3,LP14082-9,LP52960-9,Bacteria | Cervix
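Each row of this fixture names a child CODE and its IMMEDIATE_PARENT; PATH_TO_ROOT is the dotted ancestor chain. The LoincHierarchyHandler added earlier in this commit consumes a row roughly like this (the record variable is hypothetical; the column accessors match the handler):

    // For the row "LP31755-9,1,LP31755-9,LP14559-6,Microorganism":
    String parentCode = record.get("IMMEDIATE_PARENT"); // "LP31755-9"
    String childCode = record.get("CODE");              // "LP14559-6"
    // Both codes are non-blank, so the handler links them:
    // getOrCreate("LP31755-9", "(unknown)").addChild(getOrCreate("LP14559-6", "Microorganism"), RelationshipTypeEnum.ISA);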
@@ -127,7 +127,6 @@
        <dependency>
            <groupId>net.sf.json-lib</groupId>
            <artifactId>json-lib</artifactId>
            <version>2.4</version>
            <classifier>jdk15</classifier>
            <scope>test</scope>
            <exclusions>
@@ -140,14 +139,12 @@
        <dependency>
            <groupId>net.sf.json-lib</groupId>
            <artifactId>json-lib</artifactId>
            <version>2.4</version>
            <classifier>jdk15-sources</classifier>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>directory-naming</groupId>
            <artifactId>naming-java</artifactId>
            <version>0.8</version>
            <scope>test</scope>
            <exclusions>
                <exclusion>
pom.xml
@@ -320,6 +320,18 @@
        <artifactId>commons-io</artifactId>
        <version>2.4</version>
    </dependency>
+   <dependency>
+       <groupId>directory-naming</groupId>
+       <artifactId>naming-java</artifactId>
+       <version>0.8</version>
+       <scope>test</scope>
+       <exclusions>
+           <exclusion>
+               <artifactId>commons-logging</artifactId>
+               <groupId>commons-logging</groupId>
+           </exclusion>
+       </exclusions>
+   </dependency>
    <dependency>
        <groupId>javax.ejb</groupId>
        <artifactId>ejb-api</artifactId>
@@ -365,6 +377,24 @@
        <artifactId>reflow-velocity-tools</artifactId>
        <version>1.1.1</version>
    </dependency>
+   <dependency>
+       <groupId>net.sf.json-lib</groupId>
+       <artifactId>json-lib</artifactId>
+       <version>2.4</version>
+       <classifier>jdk15</classifier>
+       <exclusions>
+           <exclusion>
+               <artifactId>commons-logging</artifactId>
+               <groupId>commons-logging</groupId>
+           </exclusion>
+       </exclusions>
+   </dependency>
+   <dependency>
+       <groupId>net.sf.json-lib</groupId>
+       <artifactId>json-lib</artifactId>
+       <version>2.4</version>
+       <classifier>jdk15-sources</classifier>
+   </dependency>
    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-dbcp2</artifactId>
@@ -99,6 +99,38 @@ Java HotSpot(TM) 64-Bit Server VM (build 25.60-b23, mixed mode)]]></pre>
            large number of examples.
        </p>
    </section>
 
+   <section name="Upload Terminology">
+
+       <p>
+           The HAPI FHIR JPA server has a terminology server, and has the ability to
+           be populated with "external" code systems. These code systems are systems
+           that contain large numbers of codes, so the codes are not stored directly
+           inside the resource body.
+       </p>
+       <p>
+           HAPI has methods for uploading several popular code systems into its tables
+           using the distribution files produced by the respective code systems. This
+           is done using the <code>upload-terminology</code> command. The following
+           examples show how to do this for several popular code systems.
+       </p>
+       <p>
+           Note that the path and exact filename of the terminology files will likely
+           need to be adjusted for your local disk structure.
+       </p>
+       <h4>
+           SNOMED CT
+       </h4>
+       <pre>./hapi-fhir-cli upload-terminology -d Downloads/SnomedCT_RF2Release_INT_20160131.zip -f dstu3 -t http://localhost:8080/baseDstu3 -u http://snomed.info/sct</pre>
+
+       <p>
+           ./hapi-fhir-cli upload-terminology -d Downloads/LOINC_2.54_MULTI-AXIAL_HIERARCHY.zip -d Downloads/LOINC_2.54_Text.zip -f dstu3 -t http://localhost:8080/baseDstu3 -u http://loinc.org
+       </p>
+
+   </section>
+
 </body>
 
 </document>
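Behind the CLI, the upload is just the $upload-external-code-system operation added in this commit, so a programmatic client can invoke it directly. A sketch modeled on the test in this commit (the base URL and file paths are illustrative; "localfile" paths refer to files on the server's disk):

    FhirContext ctx = FhirContext.forDstu3();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu3");
    Parameters outcome = client
        .operation()
        .onServer()
        .named("upload-external-code-system")
        .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.LOINC_URL))
        .andParameter("localfile", new StringType("/data/LOINC_2.54_Text.zip"))
        .andParameter("localfile", new StringType("/data/LOINC_2.54_MULTI-AXIAL_HIERARCHY.zip"))
        .execute();
    // The response carries the "conceptCount" parameter declared on the operation.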