Lots more additions to LOINC

parent 5a653aeb20
commit a5a7d75e58
@@ -6,6 +6,10 @@ TODO:
 
 Comments for Loinc:
 
+Overall
+- ValueSet and ConceptMap resources have a spot for copyright and
+  contact information. Are there official values for these?
+
 Answer Lists
 - Per the notes, there is no way in FHIR currently to map answer lists to
   codes based on context. For this reason, I am ignoring any entries in

@@ -18,5 +22,31 @@ Parts
 - The PartTypeName (e.g. "ADJUSTMENT") is ignored as there is no corresponding
   property in loinc.xml
 - PartDisplayName is not mapped
+- Part links are not currently processed (it's not clear to me how to model
+  these in FHIR, as CodeSystem.hierarchyMeaning has to be only one of 'is-a'
+  or 'part-of', and presumably the 'is-a' relationship is more important).
+
+Part Mappings
+- I have made LOINC the source and SCT the target for the mappings in the
+  ConceptMap resource. Does this seem like the appropriate orientation?
+  (See the sketch after this diff.)
+- A canonical URI should be defined for the LOINC->SCT mapping ConceptMap
+  resource. I have hardcoded "http://loinc.org/loinc-to-snomed" for now, but
+  we should discuss what is appropriate.
+
+RSNA Playbook
+- A canonical URI should be defined for the "all RSNA playbook codes" ValueSet.
+  I have hardcoded "http://loinc.org/rsna-codes" for now, but we should discuss
+  what is appropriate.
+- A name for the "RSNA Playbook" ValueSet is needed.
+- Just to confirm, the "all RSNA playbook codes" ValueSet should contain the
+  LOINC codes (e.g. "17787-3") and not the part codes (e.g. "LP199995-4")?
+- A CodeSystem URI for RadLex RID and RPID codes is needed (currently
+  "http://rid" and "http://rpid" are used as placeholders), since I'm assuming
+  these exist somewhere.
+- For mappings from LOINC part codes to RadLex RIDs, are the codes considered
+  equivalent (or would they be wider/narrower)? They look equivalent to me.
+
+Document Ontology
+- Need to define a URI for the document ontology ValueSet. Currently I am
+  using "http://loinc.org/document-ontology-codes".
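The Part Mappings note above asks whether LOINC-as-source / SNOMED CT-as-target is the right orientation for the part mappings. Below is a minimal sketch of that orientation using the HAPI FHIR R4 model and the hardcoded URI from the notes; the SNOMED CT system URI, the example part code, and the example target code are illustrative assumptions and are not taken from the actual mapping file.

import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations;

public class LoincToSnomedOrientationSketch {

    public static ConceptMap buildExample() {
        ConceptMap cm = new ConceptMap();
        cm.setUrl("http://loinc.org/loinc-to-snomed");              // placeholder canonical URI from the notes
        cm.setSource(new CanonicalType("http://loinc.org"));        // LOINC is the source...
        cm.setTarget(new CanonicalType("http://snomed.info/sct"));  // ...SNOMED CT is the target

        ConceptMap.ConceptMapGroupComponent group = cm.addGroup();
        group.setSource("http://loinc.org");
        group.setTarget("http://snomed.info/sct");

        // One source element per LOINC part code, each with one or more SNOMED CT targets.
        group.addElement()
            .setCode("LP199995-4")                                  // example part code from the notes
            .setDisplay("Example part")
            .addTarget()
            .setCode("123037004")                                   // illustrative SNOMED CT code
            .setDisplay("Example target")
            .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL);

        return cm;
    }
}

Reversing the orientation would swap the source and target systems and flip the narrower/wider equivalences, which is why the direction should be agreed before the ConceptMap URI is finalized.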
@@ -106,7 +106,7 @@ public class BaseDstu3Config extends BaseConfig {
 
     @Bean(autowire = Autowire.BY_TYPE)
     public IHapiTerminologyLoaderSvc terminologyLoaderService() {
-        return new TerminologyLoaderSvc();
+        return new TerminologyLoaderSvcImpl();
     }
 
     @Bean(autowire = Autowire.BY_TYPE)

@@ -14,7 +14,7 @@ import ca.uhn.fhir.jpa.provider.r4.TerminologyUploaderProviderR4;
 import ca.uhn.fhir.jpa.term.HapiTerminologySvcR4;
 import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
 import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR4;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
+import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
 import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR4;
 import ca.uhn.fhir.validation.IValidatorModule;
 import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;

@@ -124,7 +124,7 @@ public class BaseR4Config extends BaseConfig {
 
     @Bean(autowire = Autowire.BY_TYPE)
     public IHapiTerminologyLoaderSvc terminologyLoaderService() {
-        return new TerminologyLoaderSvc();
+        return new TerminologyLoaderSvcImpl();
     }
 
     @Bean(autowire = Autowire.BY_TYPE)

@@ -8,6 +8,7 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 import org.hibernate.search.annotations.*;
+import org.hl7.fhir.r4.model.Coding;
 
 import javax.annotation.Nonnull;
 import javax.persistence.*;

@@ -121,14 +122,27 @@ public class TermConcept implements Serializable {
         }
     }
 
-    public void addProperty(@Nonnull String thePropertyName, @Nonnull String thePropertyValue) {
+    private TermConceptProperty addProperty(@Nonnull TermConceptPropertyTypeEnum thePropertyType, @Nonnull String thePropertyName, @Nonnull String thePropertyValue) {
         Validate.notBlank(thePropertyName);
 
         TermConceptProperty property = new TermConceptProperty();
         property.setConcept(this);
+        property.setType(thePropertyType);
         property.setKey(thePropertyName);
         property.setValue(thePropertyValue);
-        getProperties().add(property);
+        getStringProperties().add(property);
+
+        return property;
+    }
+
+    public TermConceptProperty addPropertyCoding(@Nonnull String thePropertyName, @Nonnull String thePropertyCodeSystem, @Nonnull String thePropertyCode, String theDisplayName) {
+        return addProperty(TermConceptPropertyTypeEnum.CODING, thePropertyName, thePropertyCode)
+            .setCodeSystem(thePropertyCodeSystem)
+            .setDisplay(theDisplayName);
+    }
+
+    public TermConceptProperty addPropertyString(@Nonnull String thePropertyName, @Nonnull String thePropertyValue) {
+        return addProperty(TermConceptPropertyTypeEnum.STRING, thePropertyName, thePropertyValue);
     }
 
     @Override

@@ -159,10 +173,6 @@ public class TermConcept implements Serializable {
         return myCode;
     }
 
-    public Integer getSequence() {
-        return mySequence;
-    }
-
     public void setCode(String theCode) {
         myCode = theCode;
     }

@@ -213,32 +223,57 @@ public class TermConcept implements Serializable {
         return myParents;
     }
 
-    public Collection<TermConceptProperty> getProperties() {
+    public Collection<TermConceptProperty> getStringProperties() {
         if (myProperties == null) {
             myProperties = new ArrayList<>();
         }
         return myProperties;
     }
 
-    public String getProperty(String thePropertyName) {
-        for (TermConceptProperty next : getProperties()) {
-            if (thePropertyName.equals(next.getKey())) {
-                return next.getValue();
-            }
-        }
-        return null;
-    }
-
-    public List<String> getProperties(String thePropertyName) {
+    public List<String> getStringProperties(String thePropertyName) {
         List<String> retVal = new ArrayList<>();
-        for (TermConceptProperty next : getProperties()) {
+        for (TermConceptProperty next : getStringProperties()) {
             if (thePropertyName.equals(next.getKey())) {
-                retVal.add(next.getValue());
+                if (next.getType() == TermConceptPropertyTypeEnum.STRING) {
+                    retVal.add(next.getValue());
+                }
             }
         }
         return retVal;
     }
 
+    public List<Coding> getCodingProperties(String thePropertyName) {
+        List<Coding> retVal = new ArrayList<>();
+        for (TermConceptProperty next : getStringProperties()) {
+            if (thePropertyName.equals(next.getKey())) {
+                if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
+                    Coding coding = new Coding();
+                    coding.setSystem(next.getCodeSystem());
+                    coding.setCode(next.getValue());
+                    coding.setDisplay(next.getDisplay());
+                    retVal.add(coding);
+                }
+            }
+        }
+        return retVal;
+    }
+
+    public Integer getSequence() {
+        return mySequence;
+    }
+
+    public void setSequence(Integer theSequence) {
+        mySequence = theSequence;
+    }
+
+    public String getStringProperty(String thePropertyName) {
+        List<String> properties = getStringProperties(thePropertyName);
+        if (properties.size() > 0) {
+            return properties.get(0);
+        }
+        return null;
+    }
+
     @Override
     public int hashCode() {
         HashCodeBuilder b = new HashCodeBuilder();

@@ -294,10 +329,6 @@ public class TermConcept implements Serializable {
         myParentPids = theParentPids;
     }
 
-    public void setSequence(Integer theSequence) {
-        mySequence = theSequence;
-    }
-
     @Override
     public String toString() {
         return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("code", myCode).append("display", myDisplay).build();
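A short usage sketch of the reworked property API on TermConcept, mirroring how the LOINC handlers later in this commit call it; the concrete codes, property names, and displays below are illustrative only.

// Illustrative only: one STRING property and one CODING property on a concept,
// using the constructor and accessors that appear elsewhere in this commit.
TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
TermConcept concept = new TermConcept(codeSystemVersion, "17787-3");
concept.setDisplay("Example LOINC concept");

// STRING property: only a key and a value are stored.
concept.addPropertyString("answer-list", "LL1234-5");

// CODING property: the value holds the code, plus a code system and a display.
concept.addPropertyCoding("rad-modality-modality-type", "http://loinc.org", "LP199995-4", "Example part");

String answerList = concept.getStringProperty("answer-list");                        // "LL1234-5"
List<Coding> modality = concept.getCodingProperties("rad-modality-modality-type");   // one Coding

The typed getters filter on TermConceptPropertyTypeEnum, so a string lookup never returns the code of a coding property and vice versa.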
@@ -33,8 +33,8 @@ import java.io.Serializable;
 })
 public class TermConceptProperty implements Serializable {
 
-
+    static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
     private static final long serialVersionUID = 1L;
 
     @ManyToOne
     @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
     private TermConcept myConcept;

@@ -48,6 +48,48 @@ public class TermConceptProperty implements Serializable {
     private String myKey;
     @Column(name = "PROP_VAL", length = 200, nullable = true)
     private String myValue;
+    @Column(name = "PROP_TYPE", length = MAX_PROPTYPE_ENUM_LENGTH, nullable = false)
+    private TermConceptPropertyTypeEnum myType;
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    @Column(name = "PROP_CODESYSTEM", length = 200, nullable = true)
+    private String myCodeSystem;
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    @Column(name = "PROP_DISPLAY", length = 200, nullable = true)
+    private String myDisplay;
+
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    public String getCodeSystem() {
+        return myCodeSystem;
+    }
+
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    public TermConceptProperty setCodeSystem(String theCodeSystem) {
+        myCodeSystem = theCodeSystem;
+        return this;
+    }
+
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    public String getDisplay() {
+        return myDisplay;
+    }
+
+    /**
+     * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
+     */
+    public TermConceptProperty setDisplay(String theDisplay) {
+        myDisplay = theDisplay;
+        return this;
+    }
+
     public String getKey() {
         return myKey;

@@ -57,10 +99,27 @@ public class TermConceptProperty implements Serializable {
         myKey = theKey;
     }
 
+    public TermConceptPropertyTypeEnum getType() {
+        return myType;
+    }
+
+    public TermConceptProperty setType(TermConceptPropertyTypeEnum theType) {
+        myType = theType;
+        return this;
+    }
+
+    /**
+     * This will contain the value for a {@link TermConceptPropertyTypeEnum#STRING string}
+     * property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property.
+     */
     public String getValue() {
         return myValue;
     }
 
+    /**
+     * This will contain the value for a {@link TermConceptPropertyTypeEnum#STRING string}
+     * property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property.
+     */
     public void setValue(String theValue) {
         myValue = theValue;
     }

@@ -0,0 +1,22 @@
+package ca.uhn.fhir.jpa.entity;
+
+/**
+ * @see TermConceptProperty#getType()
+ * @see TermConceptProperty#MAX_PROPTYPE_ENUM_LENGTH
+ */
+public enum TermConceptPropertyTypeEnum {
+    /*
+     * VALUES SHOULD BE <= 6 CHARS LONG!
+     *
+     * We store this in a DB column of that length
+     */
+
+    /**
+     * String
+     */
+    STRING,
+    /**
+     * Coding
+     */
+    CODING
+}
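The comment in the new enum ties the constant names to the PROP_TYPE column width. A tiny illustrative check of that constraint (not part of the commit, and it assumes the column stores the enum constant's name as text):

// Both STRING and CODING are exactly six characters, so this passes with
// MAX_PROPTYPE_ENUM_LENGTH = 6; any longer constant added later would fail here.
for (TermConceptPropertyTypeEnum next : TermConceptPropertyTypeEnum.values()) {
    if (next.name().length() > TermConceptProperty.MAX_PROPTYPE_ENUM_LENGTH) {
        throw new IllegalStateException("Constant name too long for PROP_TYPE column: " + next.name());
    }
}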
@@ -48,6 +48,7 @@ import org.hibernate.search.query.dsl.BooleanJunction;
 import org.hibernate.search.query.dsl.QueryBuilder;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Page;

@@ -91,20 +92,18 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
     @Autowired
     private DaoConfig myDaoConfig;
     private long myNextReindexPass;
-
     private boolean myProcessDeferred = true;
-
     @Autowired
     private PlatformTransactionManager myTransactionMgr;
     @Autowired
     private IFhirResourceDaoCodeSystem<?, ?, ?> myCodeSystemResourceDao;
 
-    private void addCodeIfNotAlreadyAdded(String system, ValueSet.ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
-        if (addedCodes.add(nextConcept.getCode())) {
-            ValueSet.ValueSetExpansionContainsComponent contains = retVal.addContains();
-            contains.setCode(nextConcept.getCode());
-            contains.setSystem(system);
-            contains.setDisplay(nextConcept.getDisplay());
+    private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept) {
+        if (theAddedCodes.add(theConcept.getCode())) {
+            ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
+            contains.setCode(theConcept.getCode());
+            contains.setSystem(theCodeSystem);
+            contains.setDisplay(theConcept.getDisplay());
         }
     }
 

@@ -137,6 +136,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
 
     protected abstract IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource, RequestDetails theRequestDetails);
 
+    protected abstract void createOrUpdateConceptMap(ConceptMap theNextConceptMap, RequestDetails theRequestDetails);
+
     abstract void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails);
 
     @Override

@@ -248,7 +249,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
 
         } else {
 
-            // bool.must(qb.keyword().onField("myProperties").matching(nextFilter.getProperty()+"="+nextFilter.getValue()).createQuery());
+            // bool.must(qb.keyword().onField("myProperties").matching(nextFilter.getStringProperty()+"="+nextFilter.getValue()).createQuery());
             bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());
 
         }

@@ -300,8 +301,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
 
     private TermConcept fetchLoadedCode(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {
         TermCodeSystemVersion codeSystem = myCodeSystemVersionDao.findByCodeSystemResourceAndVersion(theCodeSystemResourcePid, theCodeSystemVersionPid);
-        TermConcept concept = myConceptDao.findByCodeSystemAndCode(codeSystem, theCode);
-        return concept;
+        return myConceptDao.findByCodeSystemAndCode(codeSystem, theCode);
     }
 
     private void fetchParents(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {

@@ -353,8 +353,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
         TermCodeSystemVersion csv = cs.getCurrentVersion();
 
         Set<TermConcept> codes = findCodesAbove(cs.getResource().getId(), csv.getPid(), theCode);
-        ArrayList<VersionIndependentConcept> retVal = toVersionIndependentConcepts(theSystem, codes);
-        return retVal;
+        return toVersionIndependentConcepts(theSystem, codes);
     }
 
     @Transactional(propagation = Propagation.REQUIRED)

@@ -711,7 +710,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
 
     @Override
     @Transactional(propagation = Propagation.REQUIRED)
-    public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
+    public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
         Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
 
         IIdType csId = createOrUpdateCodeSystem(theCodeSystemResource, theRequestDetails);

@@ -728,6 +727,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
             createOrUpdateValueSet(nextValueSet, theRequestDetails);
         }
 
+        for (ConceptMap nextConceptMap : theConceptMaps) {
+            createOrUpdateConceptMap(nextConceptMap, theRequestDetails);
+        }
+
     }
 
     @Override

@@ -20,11 +20,11 @@ package ca.uhn.fhir.jpa.term;
  * #L%
  */
 
-import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -66,6 +66,16 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
         throw new UnsupportedOperationException();
     }
 
+    @Override
+    protected void createOrUpdateConceptMap(ConceptMap theNextConceptMap, RequestDetails theRequestDetails) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    protected void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails) {
+        throw new UnsupportedOperationException();
+    }
+
     @Override
     public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
         throw new UnsupportedOperationException();

@@ -113,14 +123,4 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
         return retVal;
     }
 
-    @Override
-    public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    protected void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails) {
-        throw new UnsupportedOperationException();
-    }
-
 }

@@ -63,6 +63,9 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl
     @Qualifier("myValueSetDaoDstu3")
     private IFhirResourceDao<ValueSet> myValueSetResourceDao;
     @Autowired
+    @Qualifier("myConceptMapDaoDstu3")
+    private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
+    @Autowired
     private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemResourceDao;
     @Autowired
     private IValidationSupport myValidationSupport;

@@ -76,19 +79,6 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl
         super();
     }
 
-    @Override
-    protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet, RequestDetails theRequestDetails) {
-        String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
-        ValueSet valueSetDstu3;
-        try {
-            valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
-        } catch (FHIRException e) {
-            throw new InternalErrorException(e);
-        }
-        myValueSetResourceDao.update(valueSetDstu3, matchUrl, theRequestDetails);
-    }
-
-
     private void addAllChildren(String theSystemString, ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
         if (isNotBlank(theCode.getCode())) {
             theListToPopulate.add(new VersionIndependentConcept(theSystemString, theCode.getCode()));

@@ -124,6 +114,30 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl
         return myCodeSystemResourceDao.update(resourceToStore, matchUrl, theRequestDetails).getId();
     }
 
+    @Override
+    protected void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap, RequestDetails theRequestDetails) {
+        String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
+        ConceptMap resourceToStore;
+        try {
+            resourceToStore = VersionConvertor_30_40.convertConceptMap(theConceptMap);
+        } catch (FHIRException e) {
+            throw new InternalErrorException(e);
+        }
+        myConceptMapResourceDao.update(resourceToStore, matchUrl, theRequestDetails).getId();
+    }
+
+    @Override
+    protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet, RequestDetails theRequestDetails) {
+        String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
+        ValueSet valueSetDstu3;
+        try {
+            valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
+        } catch (FHIRException e) {
+            throw new InternalErrorException(e);
+        }
+        myValueSetResourceDao.update(valueSetDstu3, matchUrl, theRequestDetails);
+    }
+
     @Override
     public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
         ValueSet valueSetToExpand = new ValueSet();

@@ -12,6 +12,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
+import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.StructureDefinition;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;

@@ -55,6 +56,9 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl
     @PersistenceContext(type = PersistenceContextType.TRANSACTION)
     protected EntityManager myEntityManager;
     @Autowired
+    @Qualifier("myConceptMapDaoR4")
+    private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
+    @Autowired
     @Qualifier("myCodeSystemDaoR4")
     private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
     @Autowired

@@ -76,7 +80,6 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl
         }
     }
 
-
     private boolean addTreeIfItContainsCode(String theSystemString, ConceptDefinitionComponent theNext, String theCode, List<VersionIndependentConcept> theListToPopulate) {
         boolean foundCodeInChild = false;
         for (ConceptDefinitionComponent nextChild : theNext.getConcept()) {

@@ -97,6 +100,12 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl
         return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl, theRequestDetails).getId();
     }
 
+    @Override
+    protected void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap, RequestDetails theRequestDetails) {
+        String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
+        myConceptMapResourceDao.update(theConceptMap, matchUrl, theRequestDetails).getId();
+    }
+
     @Override
     protected void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails) {
         String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());

@@ -65,6 +65,6 @@ public interface IHapiTerminologySvc {
 
     List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode);
 
-    void storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets);
+    void storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);
 
 }

@@ -24,6 +24,7 @@ import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -55,20 +56,22 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
  * #L%
  */
 
-public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
-    public static final String LOINC_FILE = "loinc.csv";
-    public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
-    public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
-    public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
-    public static final String LOINC_PART_FILE = "Part_Beta_1.csv";
-    public static final String LOINC_PART_LINK_FILE = "LoincPartLink_Beta_1.csv";
-    public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping_Beta_1.csv";
+public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
     public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
     public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
     public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
     private static final int LOG_INCREMENT = 100000;
-    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
+    public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
+    public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
+    public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv";
+    public static final String LOINC_FILE = "loinc.csv";
+    public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
+    public static final String LOINC_PART_FILE = "Part_Beta_1.csv";
+    public static final String LOINC_PART_LINK_FILE = "LoincPartLink_Beta_1.csv";
+    public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping_Beta_1.csv";
+    public static final String LOINC_RSNA_PLAYBOOK_FILE = "LoincRsnaRadiologyPlaybook.csv";
+
+    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
     @Autowired
     private IHapiTerminologySvc myTermSvc;
     @Autowired(required = false)

@@ -191,6 +194,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
         final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
         final Map<String, TermConcept> code2concept = new HashMap<>();
         final List<ValueSet> valueSets = new ArrayList<>();
+        final List<ConceptMap> conceptMaps = new ArrayList<>();
 
         CodeSystem loincCs;
         try {

@@ -233,6 +237,18 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
         handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
         iterateOverZipFile(theZipBytes, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
 
+        // Part related code mapping
+        handler = new LoincPartRelatedCodeMappingHandler(codeSystemVersion, code2concept, conceptMaps);
+        iterateOverZipFile(theZipBytes, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+
+        // Document Ontology File
+        handler = new LoincDocumentOntologyHandler(codeSystemVersion, code2concept, propertyNames, valueSets);
+        iterateOverZipFile(theZipBytes, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+
+        // RSNA Playbook file
+        handler = new LoincRsnaPlaybookHandler(codeSystemVersion, code2concept, propertyNames, valueSets, conceptMaps);
+        iterateOverZipFile(theZipBytes, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+
         theZipBytes.clear();
 
         for (Entry<String, TermConcept> next : code2concept.entrySet()) {

@@ -242,18 +258,21 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
             }
         }
 
-        ourLog.info("Have {} total concepts, {} root concepts", code2concept.size(), codeSystemVersion.getConcepts().size());
+        int valueSetCount = valueSets.size();
+        int rootConceptCount = codeSystemVersion.getConcepts().size();
+        int conceptCount = code2concept.size();
+        ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", conceptCount, rootConceptCount, valueSetCount);
 
-        storeCodeSystem(theRequestDetails, codeSystemVersion, loincCs, valueSets);
+        storeCodeSystem(theRequestDetails, codeSystemVersion, loincCs, valueSets, conceptMaps);
 
-        return new UploadStatistics(code2concept.size());
+        return new UploadStatistics(conceptCount);
     }
 
     UploadStatistics processSnomedCtFiles(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
         final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
-        final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
-        final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
-        final Set<String> validConceptIds = new HashSet<String>();
+        final Map<String, TermConcept> id2concept = new HashMap<>();
+        final Map<String, TermConcept> code2concept = new HashMap<>();
+        final Set<String> validConceptIds = new HashSet<>();
 
         IRecordHandler handler = new SctHandlerConcept(validConceptIds);
         iterateOverZipFile(theZipBytes, SCT_FILE_CONCEPT, handler, '\t', null);

@@ -264,7 +283,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
         iterateOverZipFile(theZipBytes, SCT_FILE_DESCRIPTION, handler, '\t', null);
 
         ourLog.info("Got {} concepts, cloning map", code2concept.size());
-        final HashMap<String, TermConcept> rootConcepts = new HashMap<String, TermConcept>(code2concept);
+        final HashMap<String, TermConcept> rootConcepts = new HashMap<>(code2concept);
 
         handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
         iterateOverZipFile(theZipBytes, SCT_FILE_RELATIONSHIP, handler, '\t', null);

@@ -293,7 +312,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
         CodeSystem cs = new org.hl7.fhir.r4.model.CodeSystem();
         cs.setUrl(SCT_URL);
         cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
-        storeCodeSystem(theRequestDetails, codeSystemVersion, cs, null);
+        storeCodeSystem(theRequestDetails, codeSystemVersion, cs, null, null);
 
         return new UploadStatistics(code2concept.size());
     }

@@ -308,16 +327,17 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
         myTermSvc = theTermSvc;
     }
 
-    private void storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystem, List<ValueSet> theValueSets) {
+    private void storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystem, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
         Validate.isTrue(theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT);
 
         List<ValueSet> valueSets = ObjectUtils.defaultIfNull(theValueSets, Collections.<ValueSet>emptyList());
+        List<ConceptMap> conceptMaps = ObjectUtils.defaultIfNull(theConceptMaps, Collections.<ConceptMap>emptyList());
 
         myTermSvc.setProcessDeferred(false);
         if (myTermSvcDstu3 != null) {
-            myTermSvcDstu3.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets);
+            myTermSvcDstu3.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
         } else {
-            myTermSvcR4.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets);
+            myTermSvcR4.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
         }
         myTermSvc.setProcessDeferred(true);
     }
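Each of the new LOINC artifacts above follows the same pattern: an IRecordHandler is constructed with the shared lookup structures, receives one CSVRecord per row of its CSV file inside the uploaded ZIP, and is wired up in processLoincFiles() via iterateOverZipFile(...). Below is a minimal sketch of that pattern for a hypothetical extra file; the class name, file name, and column names are invented for illustration and are not part of this commit.

package ca.uhn.fhir.jpa.term.loinc;

import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;

import java.util.Map;

import static org.apache.commons.lang3.StringUtils.trim;

public class ExampleLoincFileHandler implements IRecordHandler {

    private final Map<String, TermConcept> myCode2Concept;

    public ExampleLoincFileHandler(Map<String, TermConcept> theCode2concept) {
        myCode2Concept = theCode2concept;
    }

    @Override
    public void accept(CSVRecord theRecord) {
        // Column names are illustrative; each real handler reads the columns of its own file.
        String loincNumber = trim(theRecord.get("LoincNumber"));
        String value = trim(theRecord.get("SomeColumn"));

        TermConcept concept = myCode2Concept.get(loincNumber);
        if (concept != null) {
            concept.addPropertyString("example-property", value);
        }
    }
}

It would then be registered next to the other handlers in processLoincFiles(), e.g. handler = new ExampleLoincFileHandler(code2concept); iterateOverZipFile(theZipBytes, "Example.csv", handler, ',', QuoteMode.NON_NUMERIC);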
@ -59,12 +59,12 @@ public class LoincAnswerListLinkHandler implements IRecordHandler {
|
||||||
|
|
||||||
TermConcept loincCode = myCode2Concept.get(loincNumber);
|
TermConcept loincCode = myCode2Concept.get(loincNumber);
|
||||||
if (loincCode != null) {
|
if (loincCode != null) {
|
||||||
loincCode.addProperty("answer-list", answerListId);
|
loincCode.addPropertyString("answer-list", answerListId);
|
||||||
}
|
}
|
||||||
|
|
||||||
TermConcept answerListCode = myCode2Concept.get(answerListId);
|
TermConcept answerListCode = myCode2Concept.get(answerListId);
|
||||||
if (answerListCode != null) {
|
if (answerListCode != null) {
|
||||||
answerListCode.addProperty("answers-for", loincNumber);
|
answerListCode.addPropertyString("answers-for", loincNumber);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,95 @@
|
||||||
|
package ca.uhn.fhir.jpa.term.loinc;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||||
|
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||||
|
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||||
|
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||||
|
import org.apache.commons.csv.CSVRecord;
|
||||||
|
import org.hl7.fhir.r4.model.CanonicalType;
|
||||||
|
import org.hl7.fhir.r4.model.ConceptMap;
|
||||||
|
import org.hl7.fhir.r4.model.Enumerations;
|
||||||
|
import org.hl7.fhir.r4.model.ValueSet;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||||
|
import static org.apache.commons.lang3.StringUtils.trim;
|
||||||
|
|
||||||
|
public class LoincDocumentOntologyHandler implements IRecordHandler {
|
||||||
|
|
||||||
|
public static final String DOCUMENT_ONTOLOGY_CODES_VS_ID = "DOCUMENT_ONTOLOGY_CODES_VS";
|
||||||
|
public static final String DOCUMENT_ONTOLOGY_CODES_VS_URI = "http://loinc.org/document-ontology-codes";
|
||||||
|
public static final String DOCUMENT_ONTOLOGY_CODES_VS_NAME = "LOINC Document Ontology Codes";
|
||||||
|
private final Map<String, TermConcept> myCode2Concept;
|
||||||
|
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||||
|
private final Set<String> myPropertyNames;
|
||||||
|
private final List<ValueSet> myValueSets;
|
||||||
|
private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
|
||||||
|
private final Set<String> myCodesInRsnaPlaybookValueSet = new HashSet<>();
|
||||||
|
|
||||||
|
public LoincDocumentOntologyHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, Set<String> thePropertyNames, List<ValueSet> theValueSets) {
|
||||||
|
myCodeSystemVersion = theCodeSystemVersion;
|
||||||
|
myCode2Concept = theCode2concept;
|
||||||
|
myPropertyNames = thePropertyNames;
|
||||||
|
myValueSets = theValueSets;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void accept(CSVRecord theRecord) {
|
||||||
|
|
||||||
|
String loincNumber = trim(theRecord.get("LoincNumber"));
|
||||||
|
String partNumber = trim(theRecord.get("PartNumber"));
|
||||||
|
String partTypeName = trim(theRecord.get("PartTypeName"));
|
||||||
|
String partSequenceOrder = trim(theRecord.get("PartSequenceOrder"));
|
||||||
|
String partName = trim(theRecord.get("PartName"));
|
||||||
|
|
||||||
|
// RSNA Codes VS
|
||||||
|
ValueSet vs;
|
||||||
|
if (!myIdToValueSet.containsKey(RSNA_CODES_VS_ID)) {
|
||||||
|
vs = new ValueSet();
|
||||||
|
vs.setUrl(RSNA_CODES_VS_URI);
|
||||||
|
vs.setId(RSNA_CODES_VS_ID);
|
||||||
|
vs.setName(RSNA_CODES_VS_NAME);
|
||||||
|
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
|
||||||
|
myIdToValueSet.put(RSNA_CODES_VS_ID, vs);
|
||||||
|
myValueSets.add(vs);
|
||||||
|
} else {
|
||||||
|
vs = myIdToValueSet.get(RSNA_CODES_VS_ID);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!myCodesInRsnaPlaybookValueSet.contains(loincNumber)) {
|
||||||
|
vs
|
||||||
|
.getCompose()
|
||||||
|
.getIncludeFirstRep()
|
||||||
|
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URL)
|
||||||
|
.addConcept()
|
||||||
|
.setCode(loincNumber)
|
||||||
|
.setDisplay(longCommonName);
|
||||||
|
myCodesInRsnaPlaybookValueSet.add(loincNumber);
|
||||||
|
}
|
||||||
|
|
||||||
|
String loincCodePropName;
|
||||||
|
switch (partTypeName) {
|
||||||
|
case "Rad.Anatomic Location.Region Imaged":
|
||||||
|
loincCodePropName = "rad-anatomic-location-region-imaged";
|
||||||
|
break;
|
||||||
|
case "Rad.Anatomic Location.Imaging Focus":
|
||||||
|
loincCodePropName = "rad-anatomic-location-imaging-focus";
|
||||||
|
break;
|
||||||
|
case "Rad.Modality.Modality type":
|
||||||
|
loincCodePropName = "rad-modality-modality-type";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new InternalErrorException("Unknown PartTypeName: " + partTypeName);
|
||||||
|
}
|
||||||
|
|
||||||
|
TermConcept code = myCode2Concept.get(loincNumber);
|
||||||
|
if (code != null) {
|
||||||
|
code.addPropertyCoding(loincCodePropName, IHapiTerminologyLoaderSvc.LOINC_URL, partNumber, partName);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
||||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
|
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||||
import org.apache.commons.csv.CSVRecord;
|
import org.apache.commons.csv.CSVRecord;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
|
|
||||||
|
@ -32,7 +32,7 @@ public class LoincHandler implements IRecordHandler {
|
||||||
String longCommonName = trim(theRecord.get("LONG_COMMON_NAME"));
|
String longCommonName = trim(theRecord.get("LONG_COMMON_NAME"));
|
||||||
String shortName = trim(theRecord.get("SHORTNAME"));
|
String shortName = trim(theRecord.get("SHORTNAME"));
|
||||||
String consumerName = trim(theRecord.get("CONSUMER_NAME"));
|
String consumerName = trim(theRecord.get("CONSUMER_NAME"));
|
||||||
String display = TerminologyLoaderSvc.firstNonBlank(longCommonName, shortName, consumerName);
|
String display = TerminologyLoaderSvcImpl.firstNonBlank(longCommonName, shortName, consumerName);
|
||||||
|
|
||||||
TermConcept concept = new TermConcept(myCodeSystemVersion, code);
|
TermConcept concept = new TermConcept(myCodeSystemVersion, code);
|
||||||
concept.setDisplay(display);
|
concept.setDisplay(display);
|
||||||
|
@ -43,7 +43,7 @@ public class LoincHandler implements IRecordHandler {
|
||||||
}
|
}
|
||||||
String nextPropertyValue = theRecord.get(nextPropertyName);
|
String nextPropertyValue = theRecord.get(nextPropertyName);
|
||||||
if (isNotBlank(nextPropertyValue)) {
|
if (isNotBlank(nextPropertyValue)) {
|
||||||
concept.addProperty(nextPropertyName, nextPropertyValue);
|
concept.addPropertyString(nextPropertyName, nextPropertyValue);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -29,8 +29,6 @@ public class LoincPartLinkHandler implements IRecordHandler {
|
||||||
String loincNumber = trim(theRecord.get("LoincNumber"));
|
String loincNumber = trim(theRecord.get("LoincNumber"));
|
||||||
String longCommonName = trim(theRecord.get("LongCommonName"));
|
String longCommonName = trim(theRecord.get("LongCommonName"));
|
||||||
String partNumber = trim(theRecord.get("PartNumber"));
|
String partNumber = trim(theRecord.get("PartNumber"));
|
||||||
String partDisplayName = trim(theRecord.get("PartDisplayName"));
|
|
||||||
String status = trim(theRecord.get("Status"));
|
|
||||||
|
|
||||||
TermConcept loincConcept = myCode2Concept.get(loincNumber);
|
TermConcept loincConcept = myCode2Concept.get(loincNumber);
|
||||||
TermConcept partConcept = myCode2Concept.get(partNumber);
|
TermConcept partConcept = myCode2Concept.get(partNumber);
|
||||||
|
@ -44,7 +42,7 @@ public class LoincPartLinkHandler implements IRecordHandler {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
partConcept.addProperty();
|
// For now we're ignoring these
|
||||||
|
|
||||||
}
|
}
|
||||||
private static final Logger ourLog = LoggerFactory.getLogger(LoincPartLinkHandler.class);
|
private static final Logger ourLog = LoggerFactory.getLogger(LoincPartLinkHandler.class);

LoincPartRelatedCodeMappingHandler.java (new file):
@@ -0,0 +1,130 @@
package ca.uhn.fhir.jpa.term.loinc;

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Map;

import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.trim;

public class LoincPartRelatedCodeMappingHandler implements IRecordHandler {

   public static final String LOINC_TO_SNOMED_CM_ID = "LOINC_TO_SNOMED_CM";
   private static final Logger ourLog = LoggerFactory.getLogger(LoincPartRelatedCodeMappingHandler.class);
   private final Map<String, TermConcept> myCode2Concept;
   private final TermCodeSystemVersion myCodeSystemVersion;
   private final List<ConceptMap> myConceptMaps;

   public LoincPartRelatedCodeMappingHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, List<ConceptMap> theConceptMaps) {
      myCodeSystemVersion = theCodeSystemVersion;
      myCode2Concept = theCode2concept;
      myConceptMaps = theConceptMaps;
   }

   @Override
   public void accept(CSVRecord theRecord) {

      String partNumber = trim(theRecord.get("PartNumber"));
      String partName = trim(theRecord.get("PartName"));
      String partTypeName = trim(theRecord.get("PartTypeName"));
      String extCodeId = trim(theRecord.get("ExtCodeId"));
      // TODO: use hex code for ascii 160
      extCodeId = extCodeId.replace(" ", "");
      String extCodeDisplayName = trim(theRecord.get("ExtCodeDisplayName"));
      String extCodeSystem = trim(theRecord.get("ExtCodeSystem"));
      String mapType = trim(theRecord.get("MapType"));
      String contentOrigin = trim(theRecord.get("ContentOrigin"));
      String extCodeSystemVersion = trim(theRecord.get("ExtCodeSystemVersion"));
      String extCodeSystemCopyrightNotice = trim(theRecord.get("ExtCodeSystemCopyrightNotice"));

      ConceptMap conceptMap;
      if (extCodeSystem.equals(IHapiTerminologyLoaderSvc.SCT_URL)) {
         conceptMap = findOrAddCodeSystem(LOINC_TO_SNOMED_CM_ID, "http://loinc.org/loinc-to-snomed", extCodeSystem, extCodeSystemCopyrightNotice);
      } else {
         throw new InternalErrorException("Unknown external code system ID: " + extCodeSystem);
      }

      ConceptMap.ConceptMapGroupComponent group = null;
      for (ConceptMap.ConceptMapGroupComponent next : conceptMap.getGroup()) {
         if (next.getTarget().equals(extCodeSystem)) {
            if (defaultIfBlank(next.getTargetVersion(), "").equals(defaultIfBlank(extCodeSystemVersion, ""))) {
               group = next;
               break;
            }
         }
      }

      if (group == null) {
         group = conceptMap.addGroup();
         group.setSource(IHapiTerminologyLoaderSvc.LOINC_URL);
         group.setTarget(extCodeSystem);
         group.setTargetVersion(defaultIfBlank(extCodeSystemVersion, null));
      }

      ConceptMap.SourceElementComponent element = null;
      for (ConceptMap.SourceElementComponent next : group.getElement()) {
         if (next.getCode().equals(partNumber)) {
            element = next;
            break;
         }
      }

      if (element == null) {
         element = group
            .addElement()
            .setCode(partNumber)
            .setDisplay(partName);
      }

      ConceptMap.TargetElementComponent target = element
         .addTarget()
         .setCode(extCodeId)
         .setDisplay(extCodeDisplayName);

      switch (mapType) {
         case "Exact":
            // 'equal' is more exact than 'equivalent' in the equivalence codes
            target.setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL);
            break;
         case "LOINC broader":
            target.setEquivalence(Enumerations.ConceptMapEquivalence.NARROWER);
            break;
         case "LOINC narrower":
            target.setEquivalence(Enumerations.ConceptMapEquivalence.WIDER);
            break;
         default:
            throw new InternalErrorException("Unknown MapType: " + mapType);
      }

   }

   private ConceptMap findOrAddCodeSystem(String theId, String theUri, String theTargetCodeSystem, String theTargetCopyright) {
      for (ConceptMap next : myConceptMaps) {
         if (next.getId().equals(theId)) {
            return next;
         }
      }

      ConceptMap cm = new ConceptMap();
      cm.setId(theId);
      cm.setUrl(theUri);
      cm.setSource(new CanonicalType(IHapiTerminologyLoaderSvc.LOINC_URL));
      cm.setTarget(new CanonicalType(theTargetCodeSystem));
      cm.setCopyright(theTargetCopyright);
      myConceptMaps.add(cm);
      return cm;
   }
}
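
One point worth spelling out in the MapType switch above: the ConceptMap is oriented LOINC -> SNOMED CT, so the LOINC-centric MapType strings are translated into FHIR equivalence codes stated relative to the LOINC (source) part. The following is a minimal illustrative sketch only, not part of the commit; the helper name is made up:

   // Hypothetical helper mirroring the switch in accept(): equivalence is
   // expressed relative to the LOINC (source) part code.
   static Enumerations.ConceptMapEquivalence toEquivalence(String theMapType) {
      switch (theMapType) {
         case "Exact":
            return Enumerations.ConceptMapEquivalence.EQUAL;    // same meaning on both sides
         case "LOINC broader":
            return Enumerations.ConceptMapEquivalence.NARROWER; // SNOMED CT target is narrower than the LOINC part
         case "LOINC narrower":
            return Enumerations.ConceptMapEquivalence.WIDER;    // SNOMED CT target is wider than the LOINC part
         default:
            throw new IllegalArgumentException("Unknown MapType: " + theMapType);
      }
   }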

LoincRsnaPlaybookHandler.java (new file):
@@ -0,0 +1,165 @@
package ca.uhn.fhir.jpa.term.loinc;

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;

import java.util.*;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;

public class LoincRsnaPlaybookHandler implements IRecordHandler {

   public static final String RSNA_CODES_VS_ID = "RSNA_LOINC_CODES_VS";
   public static final String RSNA_CODES_VS_URI = "http://loinc.org/rsna-codes";
   public static final String RSNA_CODES_VS_NAME = "RSNA Playbook";
   public static final String RID_MAPPING_CM_ID = "LOINC_TO_RID_CODES_CM";
   public static final String RID_MAPPING_CM_URI = "http://loinc.org/rid-codes";
   public static final String RID_MAPPING_CM_NAME = "RSNA Playbook RID Codes Mapping";
   public static final String RID_CS_URI = "http://rid";
   public static final String RPID_MAPPING_CM_ID = "LOINC_TO_RPID_CODES_CM";
   public static final String RPID_MAPPING_CM_URI = "http://loinc.org/rpid-codes";
   public static final String RPID_MAPPING_CM_NAME = "RSNA Playbook RPID Codes Mapping";
   public static final String RPID_CS_URI = "http://rpid";
   private final Map<String, TermConcept> myCode2Concept;
   private final TermCodeSystemVersion myCodeSystemVersion;
   private final Set<String> myPropertyNames;
   private final List<ValueSet> myValueSets;
   private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
   private final List<ConceptMap> myConceptMaps;
   private final Set<String> myCodesInRsnaPlaybookValueSet = new HashSet<>();
   private final Map<String, ConceptMap> myIdToConceptMaps = new HashMap<>();

   /**
    * Constructor
    */
   public LoincRsnaPlaybookHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, Set<String> thePropertyNames, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
      myCodeSystemVersion = theCodeSystemVersion;
      myCode2Concept = theCode2concept;
      myPropertyNames = thePropertyNames;
      myValueSets = theValueSets;
      myConceptMaps = theConceptMaps;
   }

   @Override
   public void accept(CSVRecord theRecord) {

      String loincNumber = trim(theRecord.get("LoincNumber"));
      String longCommonName = trim(theRecord.get("LongCommonName"));
      String partNumber = trim(theRecord.get("PartNumber"));
      String partTypeName = trim(theRecord.get("PartTypeName"));
      String partName = trim(theRecord.get("PartName"));
      String partSequenceOrder = trim(theRecord.get("PartSequenceOrder"));
      String rid = trim(theRecord.get("RID"));
      String preferredName = trim(theRecord.get("PreferredName"));
      String rpid = trim(theRecord.get("RPID"));
      String longName = trim(theRecord.get("LongName"));

      // RSNA Codes VS
      ValueSet vs;
      if (!myIdToValueSet.containsKey(RSNA_CODES_VS_ID)) {
         vs = new ValueSet();
         vs.setUrl(RSNA_CODES_VS_URI);
         vs.setId(RSNA_CODES_VS_ID);
         vs.setName(RSNA_CODES_VS_NAME);
         vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
         myIdToValueSet.put(RSNA_CODES_VS_ID, vs);
         myValueSets.add(vs);
      } else {
         vs = myIdToValueSet.get(RSNA_CODES_VS_ID);
      }

      if (!myCodesInRsnaPlaybookValueSet.contains(loincNumber)) {
         vs
            .getCompose()
            .getIncludeFirstRep()
            .setSystem(IHapiTerminologyLoaderSvc.LOINC_URL)
            .addConcept()
            .setCode(loincNumber)
            .setDisplay(longCommonName);
         myCodesInRsnaPlaybookValueSet.add(loincNumber);
      }

      String loincCodePropName;
      switch (partTypeName) {
         case "Rad.Anatomic Location.Region Imaged":
            loincCodePropName = "rad-anatomic-location-region-imaged";
            break;
         case "Rad.Anatomic Location.Imaging Focus":
            loincCodePropName = "rad-anatomic-location-imaging-focus";
            break;
         case "Rad.Modality.Modality type":
            loincCodePropName = "rad-modality-modality-type";
            break;
         default:
            throw new InternalErrorException("Unknown PartTypeName: " + partTypeName);
      }

      TermConcept code = myCode2Concept.get(loincNumber);
      if (code != null) {
         code.addPropertyCoding(loincCodePropName, IHapiTerminologyLoaderSvc.LOINC_URL, partNumber, partName);
      }

      // LOINC Part -> Radlex RID code mappings
      addMapping(partNumber, partName, RID_MAPPING_CM_ID, RID_MAPPING_CM_URI, RID_MAPPING_CM_NAME, RID_CS_URI, rid, preferredName, Enumerations.ConceptMapEquivalence.EQUAL);

      // LOINC Term -> Radlex RPID code mappings
      addMapping(loincNumber, longCommonName, RPID_MAPPING_CM_ID, RPID_MAPPING_CM_URI, RPID_MAPPING_CM_NAME, RPID_CS_URI, rpid, longName, Enumerations.ConceptMapEquivalence.EQUAL);

   }

   private void addMapping(String theLoincNumber, String theLongCommonName, String theConceptMapId, String theConceptMapUri, String theConceptMapName, String theTargetCodeSystemUri, String theTargetCode, String theTargetDisplay, Enumerations.ConceptMapEquivalence theEquivalence) {
      if (isNotBlank(theTargetCode)) {

         ConceptMap conceptMap;
         if (!myIdToConceptMaps.containsKey(theConceptMapId)) {
            conceptMap = new ConceptMap();
            conceptMap.setId(theConceptMapId);
            conceptMap.setUrl(theConceptMapUri);
            conceptMap.setName(theConceptMapName);
            conceptMap.setSource(new CanonicalType(IHapiTerminologyLoaderSvc.LOINC_URL));
            conceptMap.setTarget(new CanonicalType(theTargetCodeSystemUri));
            myIdToConceptMaps.put(theConceptMapId, conceptMap);
            myConceptMaps.add(conceptMap);
         } else {
            conceptMap = myIdToConceptMaps.get(theConceptMapId);
         }

         ConceptMap.SourceElementComponent source = null;
         ConceptMap.ConceptMapGroupComponent group = conceptMap.getGroupFirstRep();
         for (ConceptMap.SourceElementComponent next : group.getElement()) {
            if (next.getCode().equals(theLoincNumber)) {
               source = next;
            }
         }
         if (source == null) {
            source = group.addElement();
            source.setCode(theLoincNumber);
            source.setDisplay(theLongCommonName);
         }

         boolean found = false;
         for (ConceptMap.TargetElementComponent next : source.getTarget()) {
            if (next.getCode().equals(theTargetCode)) {
               found = true;
            }
         }
         if (!found) {
            source
               .addTarget()
               .setCode(theTargetCode)
               .setDisplay(theTargetDisplay)
               .setEquivalence(theEquivalence);
         }
      }
   }
}
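
To make the data flow concrete, here is a minimal sketch (not part of the commit) that pushes the first row of the RSNA playbook test fixture further below through the handler. The in-memory CSV string, the empty collections, and the no-argument TermCodeSystemVersion constructor are assumptions made purely for illustration; the real loader wires this up internally.

   // Sketch only; assumes commons-csv and the classes above are on the classpath.
   public void rsnaPlaybookRowSketch() throws Exception {
      String csv =
         "\"LoincNumber\",\"LongCommonName\",\"PartNumber\",\"PartTypeName\",\"PartName\",\"PartSequenceOrder\",\"RID\",\"PreferredName\",\"RPID\",\"LongName\"\n" +
         "\"17787-3\",\"NM Thyroid gland Study report\",\"LP199995-4\",\"Rad.Anatomic Location.Region Imaged\",\"Neck\",\"A\",\"RID7488\",\"neck\",\"\",\"\"\n";

      List<ValueSet> valueSets = new ArrayList<>();
      List<ConceptMap> conceptMaps = new ArrayList<>();
      Map<String, TermConcept> code2concept = new HashMap<>();
      // Assumption: TermCodeSystemVersion can be instantiated directly, as in the unit tests.
      IRecordHandler handler = new LoincRsnaPlaybookHandler(new TermCodeSystemVersion(), code2concept, new HashSet<String>(), valueSets, conceptMaps);

      try (CSVParser parser = CSVParser.parse(csv, CSVFormat.DEFAULT.withFirstRecordAsHeader())) {
         for (CSVRecord next : parser) {
            handler.accept(next);
         }
      }

      // valueSets now contains the "RSNA Playbook" ValueSet including 17787-3, and conceptMaps
      // contains the LOINC part -> RadLex RID map (LP199995-4 -> RID7488, display "neck");
      // the RPID map is skipped because the RPID column is blank in this row.
   }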

SctHandlerDescription.java:
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.term.snomedct;
 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.term.IRecordHandler;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
+import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
 import org.apache.commons.csv.CSVRecord;

 import java.util.Map;
@@ -36,7 +36,7 @@ public final class SctHandlerDescription implements IRecordHandler {
       String term = theRecord.get("term");

-      TermConcept concept = TerminologyLoaderSvc.getOrCreateConcept(myCodeSystemVersion, myId2concept, id);
+      TermConcept concept = TerminologyLoaderSvcImpl.getOrCreateConcept(myCodeSystemVersion, myId2concept, id);
       concept.setCode(conceptId);
       concept.setDisplay(term);
       myCode2concept.put(conceptId, concept);

TestDstu3Config.java:
@@ -54,7 +54,7 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
       } catch (Exception e) {
          ourLog.error("Exceeded maximum wait for connection", e);
          logGetConnectionStackTrace();
-         // if ("true".equals(System.getProperty("ci"))) {
+         // if ("true".equals(System.getStringProperty("ci"))) {
          fail("Exceeded maximum wait for connection: " + e.toString());
          // }
          // System.exit(1);

TestR4Config.java:
@@ -60,7 +60,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
       } catch (Exception e) {
          ourLog.error("Exceeded maximum wait for connection", e);
          logGetConnectionStackTrace();
-         // if ("true".equals(System.getProperty("ci"))) {
+         // if ("true".equals(System.getStringProperty("ci"))) {
          fail("Exceeded maximum wait for connection: " + e.toString());
          // }
          // System.exit(1);

TerminologyLoaderSvcIntegrationTest.java → TerminologyLoaderSvcIntegrationDstu3Test.java (renamed):
@@ -1,20 +1,17 @@
 package ca.uhn.fhir.jpa.term;

-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
+import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
+import ca.uhn.fhir.util.TestUtil;

 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;

-import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
-import ca.uhn.fhir.util.TestUtil;
+import java.util.List;

-public class TerminologyLoaderSvcIntegrationTest extends BaseJpaDstu3Test {
+public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {

-   private TerminologyLoaderSvc myLoader;
+   private TerminologyLoaderSvcImpl myLoader;

    @AfterClass
    public static void afterClassClearContext() {
@@ -23,13 +20,14 @@

    @Before
    public void beforeInitTest() {
-      myLoader = new TerminologyLoaderSvc();
+      myLoader = new TerminologyLoaderSvcImpl();
       myLoader.setTermSvcForUnitTests(myTermSvc);
    }

    @Test
    @Ignore
-   public void testLoadAndStoreSnomedCt() {
+   public void testLoadAndStoreLoinc() {
+      List<byte[]> files;
       // myLoader.processSnomedCtFiles(files, mySrd);
    }

TerminologyLoaderSvcLoincTest.java:
@@ -2,12 +2,14 @@ package ca.uhn.fhir.jpa.term;

 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.term.loinc.LoincPartHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincPartRelatedCodeMappingHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincRsnaPlaybookHandler;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.TestUtil;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
+import org.hl7.fhir.r4.model.Enumerations;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -18,27 +20,20 @@ import org.mockito.Captor;
 import org.mockito.Mock;
 import org.mockito.runners.MockitoJUnitRunner;

-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;

 import static org.hamcrest.Matchers.contains;
 import static org.junit.Assert.*;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyListOf;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;

 @RunWith(MockitoJUnitRunner.class)
 public class TerminologyLoaderSvcLoincTest {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class);
-   private TerminologyLoaderSvc mySvc;
+   private TerminologyLoaderSvcImpl mySvc;

    @Mock
    private IHapiTerminologySvc myTermSvc;
@@ -48,53 +43,42 @@ public class TerminologyLoaderSvcLoincTest {

    @Captor
    private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
-   private ArrayList<byte[]> myFiles;
    @Captor
    private ArgumentCaptor<CodeSystem> mySystemCaptor;
    @Mock
    private RequestDetails details;
    @Captor
    private ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
+   @Captor
+   private ArgumentCaptor<List<ConceptMap>> myConceptMapCaptor;
+   private ZipCollectionBuilder myFiles;

-   private void addFile(String theClasspathPrefix, String theClasspathFileName, String theOutputFilename) throws IOException {
-      ByteArrayOutputStream bos;
-      bos = new ByteArrayOutputStream();
-      ZipOutputStream zos = new ZipOutputStream(bos);
-      ourLog.info("Adding {} to test zip", theClasspathFileName);
-      zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theOutputFilename));
-      String classpathName = theClasspathPrefix + theClasspathFileName;
-      InputStream stream = getClass().getResourceAsStream(classpathName);
-      Validate.notNull(stream, "Couldn't load " + classpathName);
-      byte[] byteArray = IOUtils.toByteArray(stream);
-      Validate.notNull(byteArray);
-      zos.write(byteArray);
-      zos.closeEntry();
-      zos.close();
-      ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
-      myFiles.add(bos.toByteArray());
-   }

    @Before
    public void before() {
-      mySvc = new TerminologyLoaderSvc();
+      mySvc = new TerminologyLoaderSvcImpl();
       mySvc.setTermSvcForUnitTests(myTermSvc);
       mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);

-      myFiles = new ArrayList<>();
+      myFiles = new ZipCollectionBuilder();
    }

    @Test
    public void testLoadLoinc() throws Exception {
-      addFile("/loinc/", "loinc.csv", TerminologyLoaderSvc.LOINC_FILE);
-      addFile("/loinc/", "hierarchy.csv", TerminologyLoaderSvc.LOINC_HIERARCHY_FILE);
-      addFile("/loinc/", "AnswerList_Beta_1.csv", TerminologyLoaderSvc.LOINC_ANSWERLIST_FILE);
-      addFile("/loinc/", "LoincAnswerListLink_Beta_1.csv", TerminologyLoaderSvc.LOINC_ANSWERLIST_LINK_FILE);
+      myFiles.addFile("/loinc/", "loinc.csv", TerminologyLoaderSvcImpl.LOINC_FILE);
+      myFiles.addFile("/loinc/", "hierarchy.csv", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
+      myFiles.addFile("/loinc/", "AnswerList_Beta_1.csv", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_FILE, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
+      myFiles.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE);

       // Actually do the load
-      mySvc.loadLoinc(myFiles, details);
+      mySvc.loadLoinc(myFiles.getFiles(), details);

-      verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture());
+      verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture());

       TermCodeSystemVersion ver = myCsvCaptor.getValue();

@@ -102,17 +86,29 @@
       for (TermConcept next : ver.getConcepts()) {
          concepts.put(next.getCode(), next);
       }
+      Map<String, ValueSet> valueSets = new HashMap<>();
+      for (ValueSet next : myValueSetsCaptor.getValue()) {
+         valueSets.put(next.getId(), next);
+      }
+      Map<String, ConceptMap> conceptMaps = new HashMap<>();
+      for (ConceptMap next : myConceptMapCaptor.getAllValues().get(0)) {
+         conceptMaps.put(next.getId(), next);
+      }
+      ConceptMap conceptMap;
+      TermConcept code;
+      ValueSet vs;
+      ConceptMap.ConceptMapGroupComponent group;

       // Normal loinc code
-      TermConcept code = concepts.get("10013-1");
+      code = concepts.get("10013-1");
       assertEquals("10013-1", code.getCode());
-      assertEquals("Elpot", code.getProperty("PROPERTY"));
-      assertEquals("Pt", code.getProperty("TIME_ASPCT"));
+      assertEquals("Elpot", code.getStringProperty("PROPERTY"));
+      assertEquals("Pt", code.getStringProperty("TIME_ASPCT"));
       assertEquals("R' wave amplitude in lead I", code.getDisplay());

       // Loinc code with answer
       code = concepts.get("61438-8");
-      assertThat(code.getProperties("answer-list"), contains("LL1000-0"));
+      assertThat(code.getStringProperties("answer-list"), contains("LL1000-0"));

       // Answer list
       code = concepts.get("LL1001-8");
@@ -127,15 +123,10 @@

       // Answer list code with link to answers-for
       code = concepts.get("LL1000-0");
-      assertThat(code.getProperties("answers-for"), contains("61438-8"));
+      assertThat(code.getStringProperties("answers-for"), contains("61438-8"));

       // AnswerList valueSet
-      Map<String, ValueSet> valueSets = new HashMap<>();
-      for (ValueSet next : myValueSetsCaptor.getValue()) {
-         valueSets.put(next.getId(), next);
-      }
-      ValueSet vs = valueSets.get("LL1001-8");
+      vs = valueSets.get("LL1001-8");
       assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, vs.getIdentifier().get(0).getSystem());
       assertEquals("LL1001-8", vs.getIdentifier().get(0).getValue());
       assertEquals("PhenX05_14_30D freq amts", vs.getName());
@@ -151,9 +142,94 @@
       assertEquals("LP101394-7", code.getCode());
       assertEquals("adjusted for maternal weight", code.getDisplay());

+      // Part Mappings
+      conceptMap = conceptMaps.get(LoincPartRelatedCodeMappingHandler.LOINC_TO_SNOMED_CM_ID);
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, conceptMap.getSourceCanonicalType().getValueAsString());
+      assertEquals(IHapiTerminologyLoaderSvc.SCT_URL, conceptMap.getTargetCanonicalType().getValueAsString());
+      assertEquals("This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries.", conceptMap.getCopyright());
+      assertEquals(1, conceptMap.getGroup().size());
+      group = conceptMap.getGroup().get(0);
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, group.getSource());
+      assertEquals(IHapiTerminologyLoaderSvc.SCT_URL, group.getTarget());
+      assertEquals("http://snomed.info/sct/900000000000207008/version/20170731", group.getTargetVersion());
+      assertEquals("LP18172-4", group.getElement().get(0).getCode());
+      assertEquals("Interferon.beta", group.getElement().get(0).getDisplay());
+      assertEquals(1, group.getElement().get(0).getTarget().size());
+      assertEquals("420710006", group.getElement().get(0).getTarget().get(0).getCode());
+      assertEquals("Interferon beta (substance)", group.getElement().get(0).getTarget().get(0).getDisplay());

+      // Document Ontology Parts

+      // RSNA Playbook ValueSet
+      vs = valueSets.get(LoincRsnaPlaybookHandler.RSNA_CODES_VS_ID);
+      assertEquals(LoincRsnaPlaybookHandler.RSNA_CODES_VS_NAME, vs.getName());
+      assertEquals(LoincRsnaPlaybookHandler.RSNA_CODES_VS_URI, vs.getUrl());
+      assertEquals(1, vs.getCompose().getInclude().size());
+      assertEquals(3, vs.getCompose().getInclude().get(0).getConcept().size());
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, vs.getCompose().getInclude().get(0).getSystem());
+      assertEquals("17787-3", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
+      assertEquals("NM Thyroid gland Study report", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());

+      // RSNA Playbook Code Parts - Region Imaged
+      code = concepts.get("17787-3");
+      String propertyName = "rad-anatomic-location-region-imaged";
+      assertEquals(1, code.getCodingProperties(propertyName).size());
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, code.getCodingProperties(propertyName).get(0).getSystem());
+      assertEquals("LP199995-4", code.getCodingProperties(propertyName).get(0).getCode());
+      assertEquals("Neck", code.getCodingProperties(propertyName).get(0).getDisplay());
+      // RSNA Playbook Code Parts - Imaging Focus
+      code = concepts.get("17787-3");
+      propertyName = "rad-anatomic-location-imaging-focus";
+      assertEquals(1, code.getCodingProperties(propertyName).size());
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, code.getCodingProperties(propertyName).get(0).getSystem());
+      assertEquals("LP206648-0", code.getCodingProperties(propertyName).get(0).getCode());
+      assertEquals("Thyroid gland", code.getCodingProperties(propertyName).get(0).getDisplay());
+      // RSNA Playbook Code Parts - Modality Type
+      code = concepts.get("17787-3");
+      propertyName = "rad-modality-modality-type";
+      assertEquals(1, code.getCodingProperties(propertyName).size());
+      assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, code.getCodingProperties(propertyName).get(0).getSystem());
+      assertEquals("LP208891-4", code.getCodingProperties(propertyName).get(0).getCode());
+      assertEquals("NM", code.getCodingProperties(propertyName).get(0).getDisplay());

+      // RSNA Playbook - LOINC Part -> RadLex RID Mappings
+      conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RID_MAPPING_CM_ID);
+      assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_URI, conceptMap.getUrl());
+      assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_NAME, conceptMap.getName());
+      assertEquals(1, conceptMap.getGroup().size());
+      group = conceptMap.getGroupFirstRep();
+      // all entries have the same source and target so these should be null
+      assertEquals(null, group.getSource());
+      assertEquals(null, group.getTarget());
+      assertEquals("LP199995-4", group.getElement().get(0).getCode());
+      assertEquals("Neck", group.getElement().get(0).getDisplay());
+      assertEquals(1, group.getElement().get(0).getTarget().size());
+      assertEquals("RID7488", group.getElement().get(0).getTarget().get(0).getCode());
+      assertEquals("neck", group.getElement().get(0).getTarget().get(0).getDisplay());
+      assertEquals(Enumerations.ConceptMapEquivalence.EQUAL, group.getElement().get(0).getTarget().get(0).getEquivalence());

+      // RSNA Playbook - LOINC Term -> RadLex RPID Mappings
+      conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_ID);
+      assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_URI, conceptMap.getUrl());
+      assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_NAME, conceptMap.getName());
+      assertEquals(1, conceptMap.getGroup().size());
+      group = conceptMap.getGroupFirstRep();
+      // all entries have the same source and target so these should be null
+      assertEquals(null, group.getSource());
+      assertEquals(null, group.getTarget());
+      assertEquals("24531-6", group.getElement().get(0).getCode());
+      assertEquals("US Retroperitoneum", group.getElement().get(0).getDisplay());
+      assertEquals(1, group.getElement().get(0).getTarget().size());
+      assertEquals("RPID2142", group.getElement().get(0).getTarget().get(0).getCode());
+      assertEquals("US Retroperitoneum", group.getElement().get(0).getTarget().get(0).getDisplay());
+      assertEquals(Enumerations.ConceptMapEquivalence.EQUAL, group.getElement().get(0).getTarget().get(0).getEquivalence());

    }

    @AfterClass
    public static void afterClassClearContext() {
       TestUtil.clearAllStaticFieldsForUnitTest();

TerminologyLoaderSvcSnomedCtTest.java:
@@ -7,8 +7,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.TestUtil;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -22,9 +22,7 @@ import org.mockito.runners.MockitoJUnitRunner;

 import java.io.ByteArrayOutputStream;
 import java.io.FileInputStream;
-import java.io.IOException;
 import java.util.*;
-import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;

 import static org.hamcrest.Matchers.*;
@@ -37,7 +35,7 @@ import static org.mockito.Mockito.verify;
 @RunWith(MockitoJUnitRunner.class)
 public class TerminologyLoaderSvcSnomedCtTest {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcSnomedCtTest.class);
-   private TerminologyLoaderSvc mySvc;
+   private TerminologyLoaderSvcImpl mySvc;

    @Mock
    private IHapiTerminologySvc myTermSvc;
@@ -45,21 +43,15 @@ public class TerminologyLoaderSvcSnomedCtTest {
    private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
    @Mock
    private IHapiTerminologySvcDstu3 myTermSvcDstu3;
+   private ZipCollectionBuilder myFiles;

-   private void addEntry(ZipOutputStream zos, String theClasspathPrefix, String theFileName) throws IOException {
-      ourLog.info("Adding {} to test zip", theFileName);
-      zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theFileName));
-      byte[] byteArray = IOUtils.toByteArray(getClass().getResourceAsStream(theClasspathPrefix + theFileName));
-      Validate.notNull(byteArray);
-      zos.write(byteArray);
-      zos.closeEntry();
-   }

    @Before
    public void before() {
-      mySvc = new TerminologyLoaderSvc();
+      mySvc = new TerminologyLoaderSvcImpl();
       mySvc.setTermSvcForUnitTests(myTermSvc);
       mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);

+      myFiles = new ZipCollectionBuilder();
    }

    private List<byte[]> list(byte[]... theByteArray) {
@@ -68,23 +60,18 @@ public class TerminologyLoaderSvcSnomedCtTest {

    @Test
    public void testLoadSnomedCt() throws Exception {
-      ByteArrayOutputStream bos = new ByteArrayOutputStream();
-      ZipOutputStream zos = new ZipOutputStream(bos);
-      addEntry(zos, "/sct/", "sct2_Concept_Full_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_Concept_Full-en_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_Description_Full-en_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_Identifier_Full_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_Relationship_Full_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
-      addEntry(zos, "/sct/", "sct2_TextDefinition_Full-en_INT_20160131.txt");
-      zos.close();
-
-      ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
+      myFiles.addFile("/sct/", "sct2_Concept_Full_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_Concept_Full-en_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_Description_Full-en_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_Identifier_Full_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_Relationship_Full_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_TextDefinition_Full-en_INT_20160131.txt");

       RequestDetails details = mock(RequestDetails.class);
-      mySvc.loadSnomedCt(list(bos.toByteArray()), details);
+      mySvc.loadSnomedCt(myFiles.getFiles(), details);

-      verify(myTermSvcDstu3).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyListOf(ValueSet.class));
+      verify(myTermSvcDstu3).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyListOf(ValueSet.class), anyListOf(ConceptMap.class));

       TermCodeSystemVersion csv = myCsvCaptor.getValue();
       TreeSet<String> allCodes = toCodes(csv, true);
@@ -115,7 +102,7 @@ public class TerminologyLoaderSvcSnomedCtTest {
    public void testLoadSnomedCtBadInput() throws Exception {
       ByteArrayOutputStream bos = new ByteArrayOutputStream();
       ZipOutputStream zos = new ZipOutputStream(bos);
-      addEntry(zos, "/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
+      myFiles.addFile("/sct/", "sct2_StatedRelationship_Full_INT_20160131.txt");
       zos.close();

       ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);

TerminologySvcImplDstu3Test.java:
@@ -179,13 +179,13 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
       parentA.addChild(childAA, RelationshipTypeEnum.ISA);

       TermConcept childAAA = new TermConcept(cs, "childAAA");
-      childAAA.addProperty("propA", "valueAAA");
-      childAAA.addProperty("propB", "foo");
+      childAAA.addPropertyString("propA", "valueAAA");
+      childAAA.addPropertyString("propB", "foo");
       childAA.addChild(childAAA, RelationshipTypeEnum.ISA);

       TermConcept childAAB = new TermConcept(cs, "childAAB");
-      childAAB.addProperty("propA", "valueAAB");
-      childAAB.addProperty("propB", "foo");
+      childAAB.addPropertyString("propA", "valueAAB");
+      childAAB.addPropertyString("propB", "foo");
       childAA.addChild(childAAB, RelationshipTypeEnum.ISA);

       TermConcept childAB = new TermConcept(cs, "childAB");

ZipCollectionBuilder.java (new file):
@@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.term;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipCollectionBuilder {

   private static final Logger ourLog = LoggerFactory.getLogger(ZipCollectionBuilder.class);
   private final ArrayList<byte[]> myFiles;

   public ZipCollectionBuilder() {
      myFiles = new ArrayList<>();
   }

   public void addFile(String theClasspathPrefix, String theClasspathFileName) throws IOException {
      addFile(theClasspathPrefix, theClasspathFileName, theClasspathFileName);
   }

   public void addFile(String theClasspathPrefix, String theClasspathFileName, String theOutputFilename) throws IOException {
      ByteArrayOutputStream bos;
      bos = new ByteArrayOutputStream();
      ZipOutputStream zos = new ZipOutputStream(bos);
      ourLog.info("Adding {} to test zip", theClasspathFileName);
      zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theOutputFilename));
      String classpathName = theClasspathPrefix + theClasspathFileName;
      InputStream stream = getClass().getResourceAsStream(classpathName);
      Validate.notNull(stream, "Couldn't load " + classpathName);
      byte[] byteArray = IOUtils.toByteArray(stream);
      Validate.notNull(byteArray);
      zos.write(byteArray);
      zos.closeEntry();
      zos.close();
      ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
      myFiles.add(bos.toByteArray());
   }

   public ArrayList<byte[]> getFiles() {
      return myFiles;
   }

}
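
ZipCollectionBuilder replaces the per-test zip plumbing that the two loader tests used to carry around. Each addFile() call builds its own single-entry zip under the SnomedCT_Release_INT_20160131_Full/Terminology/ path, and getFiles() returns one byte[] per call. Typical usage, mirroring the updated tests above (mySvc and details are the existing test fixtures; this is a usage sketch, not new functionality):

   ZipCollectionBuilder files = new ZipCollectionBuilder();
   files.addFile("/loinc/", "loinc.csv", TerminologyLoaderSvcImpl.LOINC_FILE);  // classpath fixture, renamed inside the zip
   files.addFile("/loinc/", TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE); // name kept as-is inside the zip
   mySvc.loadLoinc(files.getFiles(), details);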

New test fixture (LOINC Document Ontology sample):
@@ -0,0 +1,10 @@
"LoincNumber","PartNumber","PartTypeName","PartSequenceOrder","PartName"
"11488-4","LP173418-7","Document.Kind","1","Note"
"11488-4","LP173110-0","Document.TypeOfService","1","Consultation"
"11488-4","LP173061-5","Document.Setting","1","{Setting}"
"11488-4","LP187187-2","Document.Role","1","{Role}"
"11490-0","LP173418-7","Document.Kind","1","Note"
"11490-0","LP173221-5","Document.TypeOfService","1","Discharge summary"
"11490-0","LP173061-5","Document.Setting","1","{Setting}"
"11490-0","LP173084-7","Document.Role","1","Physician"
"11492-6","LP173418-7","Document.Kind","1","Note"

New test fixture (LOINC RSNA Radiology Playbook sample):
@@ -0,0 +1,10 @@
"LoincNumber","LongCommonName" ,"PartNumber","PartTypeName" ,"PartName" ,"PartSequenceOrder","RID" ,"PreferredName" ,"RPID" ,"LongName"
"17787-3" ,"NM Thyroid gland Study report","LP199995-4","Rad.Anatomic Location.Region Imaged","Neck" ,"A" ,"RID7488" ,"neck" ,"" ,""
"17787-3" ,"NM Thyroid gland Study report","LP206648-0","Rad.Anatomic Location.Imaging Focus","Thyroid gland" ,"A" ,"RID7578" ,"thyroid gland" ,"" ,""
"17787-3" ,"NM Thyroid gland Study report","LP208891-4","Rad.Modality.Modality type" ,"NM" ,"A" ,"RID10330","nuclear medicine imaging","" ,""
"24531-6" ,"US Retroperitoneum" ,"LP207608-3","Rad.Modality.Modality type" ,"US" ,"A" ,"RID10326","Ultrasound" ,"RPID2142","US Retroperitoneum"
"24531-6" ,"US Retroperitoneum" ,"LP199943-4","Rad.Anatomic Location.Imaging Focus","Retroperitoneum" ,"A" ,"RID431" ,"RETROPERITONEUM" ,"RPID2142","US Retroperitoneum"
"24531-6" ,"US Retroperitoneum" ,"LP199956-6","Rad.Anatomic Location.Region Imaged","Abdomen" ,"A" ,"RID56" ,"Abdomen" ,"RPID2142","US Retroperitoneum"
"24532-4" ,"US Abdomen RUQ" ,"LP199956-6","Rad.Anatomic Location.Region Imaged","Abdomen" ,"A" ,"RID56" ,"Abdomen" ,"" ,""
"24532-4" ,"US Abdomen RUQ" ,"LP207608-3","Rad.Modality.Modality type" ,"US" ,"A" ,"RID10326","Ultrasound" ,"" ,""
"24532-4" ,"US Abdomen RUQ" ,"LP208105-9","Rad.Anatomic Location.Imaging Focus","Right upper quadrant","A" ,"RID29994","Right upper quadrant" ,"" ,""
@ -0,0 +1,10 @@
|
||||||
|
"PartNumber","PartName","PartTypeName","ExtCodeId","ExtCodeDisplayName","ExtCodeSystem","MapType","ContentOrigin","ExtCodeSystemVersion","ExtCodeSystemCopyrightNotice"
|
||||||
|
"LP18172-4","Interferon.beta","COMPONENT"," 420710006","Interferon beta (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP31706-2","Nornicotine","COMPONENT","1018001","Nornicotine (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP15826-8","Prostaglandin F2","COMPONENT","10192006","Prostaglandin PGF2 (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP7400-7","Liver","SYSTEM","10200004","Liver structure (body structure)","http://snomed.info/sct","LOINC broader","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP29165-5","Liver.FNA","SYSTEM","10200004","Liver structure (body structure)","http://snomed.info/sct","LOINC broader","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP15666-8","Inosine","COMPONENT","102640000","Inosine (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP15943-1","Uronate","COMPONENT","102641001","Uronic acid (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP15791-4","Phenylketones","COMPONENT","102642008","Phenylketones (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
|
||||||
|
"LP15721-1","Malonate","COMPONENT","102648007","Malonic acid (substance)","http://snomed.info/sct","Exact","Both","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO.This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org<mailto:info@snomed.org>. This may incur a fee in SNOMED International non-Member countries."
@@ -7,3 +7,4 @@
"LP173483-1","ADJUSTMENT","post cyanocobalamin",,"ACTIVE"
"LP173484-9","ADJUSTMENT","W hyperextension)",,"ACTIVE"
"LP6244-0","METHOD","EKG","Electrocardiogram (EKG)","ACTIVE"
"LP18172-4","COMPONENT","Interferon.beta","Interferon beta","ACTIVE"
@@ -1,12 +1,15 @@
"LOINC_NUM","COMPONENT" ,"PROPERTY","TIME_ASPCT","SYSTEM" ,"SCALE_TYP","METHOD_TYP","CLASS" ,"SOURCE","VersionLastChanged","CHNG_TYPE","DefinitionDescription","STATUS","CONSUMER_NAME","CLASSTYPE","FORMULA","SPECIES","EXMPL_ANSWERS","SURVEY_QUEST_TEXT" ,"SURVEY_QUEST_SRC" ,"UNITSREQUIRED","SUBMITTED_UNITS","RELATEDNAMES2" ,"SHORTNAME" ,"ORDER_OBS" ,"CDISC_COMMON_TESTS","HL7_FIELD_SUBFIELD_ID","EXTERNAL_COPYRIGHT_NOTICE","EXAMPLE_UNITS","LONG_COMMON_NAME" ,"UnitsAndRange","DOCUMENT_SECTION","EXAMPLE_UCUM_UNITS","EXAMPLE_SI_UCUM_UNITS","STATUS_REASON","STATUS_TEXT","CHANGE_REASON_PUBLIC" ,"COMMON_TEST_RANK","COMMON_ORDER_RANK","COMMON_SI_TEST_RANK","HL7_ATTACHMENT_STRUCTURE","EXTERNAL_COPYRIGHT_LINK","PanelType","AskAtOrderEntry","AssociatedObservations"
"LOINC_NUM","COMPONENT" ,"PROPERTY","TIME_ASPCT","SYSTEM" ,"SCALE_TYP","METHOD_TYP" ,"CLASS" ,"VersionLastChanged","CHNG_TYPE","DefinitionDescription" ,"STATUS","CONSUMER_NAME","CLASSTYPE","FORMULA","SPECIES","EXMPL_ANSWERS","SURVEY_QUEST_TEXT" ,"SURVEY_QUEST_SRC" ,"UNITSREQUIRED","SUBMITTED_UNITS","RELATEDNAMES2" ,"SHORTNAME" ,"ORDER_OBS" ,"CDISC_COMMON_TESTS","HL7_FIELD_SUBFIELD_ID","EXTERNAL_COPYRIGHT_NOTICE","EXAMPLE_UNITS","LONG_COMMON_NAME" ,"UnitsAndRange","DOCUMENT_SECTION","EXAMPLE_UCUM_UNITS","EXAMPLE_SI_UCUM_UNITS","STATUS_REASON","STATUS_TEXT","CHANGE_REASON_PUBLIC" ,"COMMON_TEST_RANK","COMMON_ORDER_RANK","COMMON_SI_TEST_RANK","HL7_ATTACHMENT_STRUCTURE","EXTERNAL_COPYRIGHT_LINK","PanelType","AskAtOrderEntry","AssociatedObservations" ,"VersionFirstReleased","ValidHL7AttachmentRequest"
"10013-1" ,"R' wave amplitude.lead I" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-I; R wave Amp L-I; Random; Right; Voltage" ,"R' wave Amp L-I" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead I" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10013-1" ,"R' wave amplitude.lead I" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-I; R wave Amp L-I; Random; Right; Voltage" ,"R' wave Amp L-I" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead I" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10014-9" ,"R' wave amplitude.lead II" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"2; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-II; R wave Amp L-II; Random; Right; Voltage" ,"R' wave Amp L-II" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead II" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10014-9" ,"R' wave amplitude.lead II" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"2; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-II; R wave Amp L-II; Random; Right; Voltage" ,"R' wave Amp L-II" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead II" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10015-6" ,"R' wave amplitude.lead III" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"3; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-III; R wave Amp L-III; Random; Right; Voltage" ,"R' wave Amp L-III" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead III" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10015-6" ,"R' wave amplitude.lead III" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"3; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-III; R wave Amp L-III; Random; Right; Voltage" ,"R' wave Amp L-III" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead III" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10016-4" ,"R' wave amplitude.lead V1" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V1; R wave Amp L-V1; Random; Right; Voltage" ,"R' wave Amp L-V1" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V1" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10016-4" ,"R' wave amplitude.lead V1" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V1; R wave Amp L-V1; Random; Right; Voltage" ,"R' wave Amp L-V1" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V1" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"1001-7" ,"DBG Ab" ,"Pr" ,"Pt" ,"Ser/Plas^donor","Ord" , ,"BLDBK" ,"FS" ,"2.44" ,"MIN" , ,"ACTIVE", ,1 , , , , , , , ,"ABS; Aby; Antby; Anti; Antibodies; Antibody; Autoantibodies; Autoantibody; BLOOD BANK; Donna Bennett-Goodspeed; Donr; Ordinal; Pl; Plasma; Plsm; Point in time; QL; Qual; Qualitative; Random; Screen; SerP; SerPl; SerPl^donor; SerPlas; Serum; Serum or plasma; SR","DBG Ab SerPl Donr Ql" ,"Observation", , , , ,"DBG Ab [Presence] in Serum or Plasma from donor" , , , , , , ,"The Property has been changed from ACnc to Pr (Presence) to reflect the new model for ordinal terms where results are based on presence or absence.",0 ,0 ,0 , , , , ,
"1001-7" ,"DBG Ab" ,"Pr" ,"Pt" ,"Ser/Plas^donor" ,"Ord" , ,"BLDBK" ,"2.44" ,"MIN" , ,"ACTIVE", ,1 , , , , , , , ,"ABS; Aby; Antby; Anti; Antibodies; Antibody; Autoantibodies; Autoantibody; BLOOD BANK; Donna Bennett-Goodspeed; Donr; Ordinal; Pl; Plasma; Plsm; Point in time; QL; Qual; Qualitative; Random; Screen; SerP; SerPl; SerPl^donor; SerPlas; Serum; Serum or plasma; SR" ,"DBG Ab SerPl Donr Ql" ,"Observation", , , , ,"DBG Ab [Presence] in Serum or Plasma from donor" , , , , , , ,"The Property has been changed from ACnc to Pr (Presence) to reflect the new model for ordinal terms where results are based on presence or absence." ,0 ,0 ,0 , , , , , , ,
"10017-2" ,"R' wave amplitude.lead V2" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V2; R wave Amp L-V2; Random; Right; Voltage" ,"R' wave Amp L-V2" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V2" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10017-2" ,"R' wave amplitude.lead V2" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V2; R wave Amp L-V2; Random; Right; Voltage" ,"R' wave Amp L-V2" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V2" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10018-0" ,"R' wave amplitude.lead V3" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V3; R wave Amp L-V3; Random; Right; Voltage" ,"R' wave Amp L-V3" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V3" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10018-0" ,"R' wave amplitude.lead V3" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V3; R wave Amp L-V3; Random; Right; Voltage" ,"R' wave Amp L-V3" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V3" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10019-8" ,"R' wave amplitude.lead V4" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage" ,"R' wave Amp L-V4" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V4" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10019-8" ,"R' wave amplitude.lead V4" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage" ,"R' wave Amp L-V4" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V4" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"10020-6" ,"R' wave amplitude.lead V5" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage" ,"R' wave Amp L-V5" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V5" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10020-6" ,"R' wave amplitude.lead V5" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage" ,"R' wave Amp L-V5" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V5" , , ,"mV" , , , , ,0 ,0 ,0 , , , , , , ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30D","Find" ,"Pt" ,"^Patient" ,"Ord" ,"PhenX" ,"PHENX" ,"PhenX" ,"2.44" ,"MIN" , ,"TRIAL" , ,2 , , , ,"Each time you eat bread, toast or dinner rolls, how much do you usually eat?","PhenX.050201100100","N" , ,"Finding; Findings; How much bread in 30D; Last; Ordinal; Point in time; QL; Qual; Qualitative; Random; Screen" ,"How much bread in 30D PhenX", , , , , ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]", , , , , , , ,0 ,0 ,0 , , , , ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30D","Find" ,"Pt" ,"^Patient" ,"Ord" ,"PhenX" ,"PHENX" ,"2.44" ,"MIN" , ,"TRIAL" , ,2 , , , ,"Each time you eat bread, toast or dinner rolls, how much do you usually eat?","PhenX.050201100100","N" , ,"Finding; Findings; How much bread in 30D; Last; Ordinal; Point in time; QL; Qual; Qualitative; Random; Screen" ,"How much bread in 30D PhenX", , , , , ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]", , , , , , , ,0 ,0 ,0 , , , , , , ,
"10000-8" ,"R wave duration.lead AVR" ,"Time" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; Durat; ECG; EKG.MEASUREMENTS; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave dur L-AVR; R wave dur L-AVR; Random; Right" ,"R wave dur L-AVR" ,"Observation", , , ,"s" ,"R wave duration in lead AVR" , , ,"s" , , , , ,0 ,0 ,0 , , , , ,
"10000-8" ,"R wave duration.lead AVR" ,"Time" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; Durat; ECG; EKG.MEASUREMENTS; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave dur L-AVR; R wave dur L-AVR; Random; Right" ,"R wave dur L-AVR" ,"Observation", , , ,"s" ,"R wave duration in lead AVR" , , ,"s" , , , , ,0 ,0 ,0 , , , , , , ,
"17787-3" ,"Study report" ,"Find" ,"Pt" ,"Neck>Thyroid gland","Doc" ,"NM" ,"RAD" ,"2.61" ,"MIN" , ,"ACTIVE", ,2 , , , , , , , ,"Document; Finding; Findings; Imaging; Point in time; Radiology; Random; Study report; Thy" ,"NM Thyroid Study report" ,"Both" , , , , ,"NM Thyroid gland Study report" , , , , , , ,"Changed System from ""Thyroid"" for conformance with the LOINC/RadLex unified model.; Method of ""Radnuc"" was changed to ""NM"". The LOINC/RadLex Committee agreed to use a subset of the two-letter DICOM modality codes as the primary modality identifier." ,0 ,0 ,0 ,"IG exists" , , , ,"81220-6;72230-6" ,"1.0l" ,
"17788-1" ,"Large unstained cells/100 leukocytes" ,"NFr" ,"Pt" ,"Bld" ,"Qn" ,"Automated count","HEM/BC" ,"2.50" ,"MIN" ,"Part of auto diff output of Bayer H*3S; peroxidase negative cells too large to be classified as lymph or basophil","ACTIVE", ,1 , , , , , ,"Y" ,"%" ,"100WBC; Auto; Automated detection; Blood; Cell; Cellularity; Elec; Elect; Electr; HEMATOLOGY/CELL COUNTS; Leuc; Leuk; Leukocyte; Lkcs; LUC; Number Fraction; Percent; Point in time; QNT; Quan; Quant; Quantitative; Random; WB; WBC; WBCs; White blood cell; White blood cells; Whole blood","LUC/leuk NFr Bld Auto" ,"Observation", , , ,"%" ,"Large unstained cells/100 leukocytes in Blood by Automated count" , , ,"%" , , , , ,1894 ,0 ,1894 , , , , , ,"1.0l" ,
"11488-4" ,"Consultation note" ,"Find" ,"Pt" ,"{Setting}" ,"Doc" ,"{Role}" ,"DOC.ONTOLOGY","2.63" ,"MIN" , ,"ACTIVE", ,2 , , , , , , , ,"Consult note; DOC.ONT; Document; Encounter; Evaluation and management; Evaluation and management note; Finding; Findings; notes; Point in time; Random; Visit note" ,"Consult note" ,"Both" , , , , ,"Consult note" , , , , , , ,"Edit made because this term is conformant to the Document Ontology axis values and therefore are being placed in this class.; Based on Clinical LOINC Committee decision during the September 2014 meeting, {Provider} was changed to {Author Type} to emphasize a greater breadth of potential document authors. At the September 2015 Clinical LOINC Committee meeting, the Committee decided to change {Author Type} to {Role} to align with the 'Role' axis name in the LOINC Document Ontology.; Because it is too difficult to maintain and because the distinction between documents and sections is not clear-cut nor necessary in most cases, the DOCUMENT_SECTION field has been deemed to have little value. The field has been set to null in the December 2017 release in preparation for removal in the December 2018 release. These changes were approved by the Clinical LOINC Committee.",0 ,0 ,0 ,"IG exists" , , , ,"81222-2;72231-4;81243-8","1.0j-a" ,"Y"