Work on loinc mapping

James Agnew 2018-03-10 20:31:27 -03:00
parent 611ee457cb
commit 5a653aeb20
28 changed files with 535 additions and 488 deletions

View File

@ -1,4 +1,22 @@
Database migration:
ALTER TABLE TRM_CODESYSTEM_VER DROP COLUMN RES_VERSION_ID;
TODO:
In answer lists, figure out how to handle externally defined lists
Comments for LOINC:
Answer Lists
- Per the notes, there is currently no way in FHIR to map answer lists to
codes based on context. For this reason, any entries in
LoincAnswerListLink_Beta_1.csv whose "ApplicableContext" field is not
empty are ignored (see the sketch after these notes). Is this correct?
Parts
- Only parts with a status of "ACTIVE" are imported; all others are
ignored.
- The PartTypeName (e.g. "ADJUSTMENT") is ignored, as there is no corresponding
property in loinc.xml.
- PartDisplayName is not mapped.
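
A minimal sketch of the two filtering rules above, assuming only the CSV column names that appear in this commit ("ApplicableContext" and "Status"); the real logic lives in LoincAnswerListLinkHandler and LoincPartHandler further down.

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;

import org.apache.commons.csv.CSVRecord;

class LoincRowFilters {

   // Answer list links bound to a specific panel context are skipped, because
   // FHIR currently has no way to express context-dependent answer list bindings.
   static boolean isContextSpecificAnswerListLink(CSVRecord theRecord) {
      return isNotBlank(trim(theRecord.get("ApplicableContext")));
   }

   // Only parts whose Status column is "ACTIVE" are imported.
   static boolean isImportablePart(CSVRecord theRecord) {
      return "ACTIVE".equals(trim(theRecord.get("Status")));
   }
}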

View File

@ -91,7 +91,7 @@ ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number pr
ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!

View File

@ -110,12 +110,7 @@ public class BaseDstu3Config extends BaseConfig {
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcImpl();
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvcDstu3 terminologyServiceDstu3() {
public IHapiTerminologySvcDstu3 terminologyService() {
return new HapiTerminologySvcDstu3();
}

View File

@ -65,6 +65,18 @@ public class BaseR4Config extends BaseConfig {
return retVal;
}
@Bean(name = "myGraphQLProvider")
@Lazy
public GraphQLProvider graphQLProvider() {
return new GraphQLProvider(fhirContextR4(), validationSupportChainR4(), graphqlStorageServices());
}
@Bean
@Lazy
public GraphQLEngine.IGraphQLStorageServices graphqlStorageServices() {
return new JpaStorageServices();
}
@Bean(name = "myInstanceValidatorR4")
@Lazy
public IValidatorModule instanceValidatorR4() {
@ -86,12 +98,6 @@ public class BaseR4Config extends BaseConfig {
return searchDao;
}
@Bean(name = "myGraphQLProvider")
@Lazy
public GraphQLProvider graphQLProvider() {
return new GraphQLProvider(fhirContextR4(), validationSupportChainR4(), graphqlStorageServices());
}
@Bean(autowire = Autowire.BY_TYPE)
public SearchParamExtractorR4 searchParamExtractor() {
return new SearchParamExtractorR4();
@ -122,7 +128,7 @@ public class BaseR4Config extends BaseConfig {
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvcR4 terminologyServiceR4() {
public IHapiTerminologySvcR4 terminologyService() {
return new HapiTerminologySvcR4();
}
@ -139,10 +145,4 @@ public class BaseR4Config extends BaseConfig {
return new JpaValidationSupportChainR4();
}
@Bean
@Lazy
public GraphQLEngine.IGraphQLStorageServices graphqlStorageServices() {
return new JpaStorageServices();
}
}

View File

@ -229,6 +229,16 @@ public class TermConcept implements Serializable {
return null;
}
public List<String> getProperties(String thePropertyName) {
List<String> retVal = new ArrayList<>();
for (TermConceptProperty next : getProperties()) {
if (thePropertyName.equals(next.getKey())) {
retVal.add(next.getValue());
}
}
return retVal;
}
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
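
A small usage sketch for the new multi-valued accessor, assuming a code-to-concept map shaped like the ones the LOINC handlers in this commit build; the loader tests later in the commit exercise the same "answer-list" property on code 61438-8.

import ca.uhn.fhir.jpa.entity.TermConcept;

import java.util.Map;

class AnswerListLookupSketch {

   // theCode2Concept is assumed to be keyed by LOINC code, as in the loader handlers.
   static void printAnswerLists(Map<String, TermConcept> theCode2Concept) {
      TermConcept code = theCode2Concept.get("61438-8");
      // getProperties(String) returns every value stored under a key, unlike the
      // single-valued getProperty(String) accessor used elsewhere in the tests.
      for (String answerListId : code.getProperties("answer-list")) {
         System.out.println("61438-8 is bound to answer list " + answerListId);
      }
   }
}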

View File

@ -20,7 +20,8 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import org.hibernate.search.annotations.Field;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.validator.constraints.NotBlank;
import javax.persistence.*;
@ -37,38 +38,43 @@ public class TermConceptProperty implements Serializable {
@ManyToOne
@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
private TermConcept myConcept;
@Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PROP_PID", sequenceName = "SEQ_CONCEPT_PROP_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PROP_PID")
@Column(name = "PID")
private Long myId;
@Column(name="PROP_KEY", length=200, nullable=false)
@Column(name = "PROP_KEY", length = 200, nullable = false)
@NotBlank
private String myKey;
@Column(name="PROP_VAL", length=200, nullable=true)
@Column(name = "PROP_VAL", length = 200, nullable = true)
private String myValue;
public String getKey() {
return myKey;
}
public String getValue() {
return myValue;
}
public void setConcept(TermConcept theConcept) {
myConcept = theConcept;
}
public void setKey(String theKey) {
myKey = theKey;
}
public String getValue() {
return myValue;
}
public void setValue(String theValue) {
myValue = theValue;
}
public void setConcept(TermConcept theConcept) {
myConcept = theConcept;
}
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("key", myKey)
.append("value", myValue)
.toString();
}
}

View File

@ -23,12 +23,14 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
@ -44,6 +46,8 @@ import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
@ -65,8 +69,8 @@ import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcImpl.class);
public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiTerminologySvcImpl.class);
private static final Object PLACEHOLDER_OBJECT = new Object();
private static boolean ourForceSaveDeferredAlwaysForUnitTest;
@Autowired
@ -93,7 +97,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
@Autowired
private PlatformTransactionManager myTransactionMgr;
@Autowired
private IVersionSpecificValidationSupport myVersionSpecificValidationSupport;
private IFhirResourceDaoCodeSystem<?, ?, ?> myCodeSystemResourceDao;
private void addCodeIfNotAlreadyAdded(String system, ValueSet.ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
if (addedCodes.add(nextConcept.getCode())) {
@ -124,13 +128,17 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
boolean retVal = theSetToPopulate.add(theConcept);
if (retVal) {
if (theSetToPopulate.size() >= myDaoConfig.getMaximumExpansionSize()) {
String msg = myContext.getLocalizer().getMessage(HapiTerminologySvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
throw new InvalidRequestException(msg);
}
}
return retVal;
}
protected abstract IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource, RequestDetails theRequestDetails);
abstract void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails);
@Override
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
@ -241,7 +249,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
} else {
// bool.must(qb.keyword().onField("myProperties").matching(nextFilter.getProperty()+"="+nextFilter.getValue()).createQuery());
bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty()+"="+nextFilter.getValue()).createQuery());
bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());
}
}
@ -340,7 +348,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
public List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem);
if (cs == null) {
return myVersionSpecificValidationSupport.findCodesAboveUsingBuiltInSystems(theSystem, theCode);
return findCodesAboveUsingBuiltInSystems(theSystem, theCode);
}
TermCodeSystemVersion csv = cs.getCurrentVersion();
@ -372,7 +380,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
public List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem);
if (cs == null) {
return myVersionSpecificValidationSupport.findCodesBelowUsingBuiltInSystems(theSystem, theCode);
return findCodesBelowUsingBuiltInSystems(theSystem, theCode);
}
TermCodeSystemVersion csv = cs.getCurrentVersion();
@ -651,7 +659,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
myCodeSystemDao.save(codeSystem);
} else {
if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystemVersion.getResource().getId())) {
String msg = myContext.getLocalizer().getMessage(HapiTerminologySvcImpl.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri,
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvcImpl.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri,
codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
throw new UnprocessableEntityException(msg);
}
@ -701,6 +709,27 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
}
}
@Override
@Transactional(propagation = Propagation.REQUIRED)
public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
IIdType csId = createOrUpdateCodeSystem(theCodeSystemResource, theRequestDetails);
ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
Long codeSystemResourcePid = resource.getId();
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
theCodeSystemVersion.setResource(resource);
storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemVersion);
for (ValueSet nextValueSet : theValueSets) {
createOrUpdateValueSet(nextValueSet, theRequestDetails);
}
}
@Override
public boolean supportsSystem(String theSystem) {
TermCodeSystem cs = getCodeSystem(theSystem);
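
Taken together, the additions in this file turn code system storage into a template method: BaseHapiTerminologySvcImpl owns the version-independent flow (persist the CodeSystem resource, attach the TermCodeSystemVersion, then create or update each ValueSet) and delegates resource persistence to the per-version createOrUpdateCodeSystem / createOrUpdateValueSet hooks shown in the DSTU3 and R4 services below. A rough sketch of the shape, with deliberately simplified types and illustrative names:

// Rough shape only; not the real signatures.
abstract class TerminologySvcTemplateSketch {

   // Per-version hooks implemented by the DSTU3 and R4 services (convert + persist).
   protected abstract String createOrUpdateCodeSystem(Object theCodeSystemResource);

   protected abstract void createOrUpdateValueSet(Object theValueSet);

   // Version-independent flow owned by the base class.
   void storeNewCodeSystemVersion(Object theCodeSystemResource, java.util.List<Object> theValueSets) {
      String codeSystemId = createOrUpdateCodeSystem(theCodeSystemResource);
      // ...resolve the persisted resource by codeSystemId and attach the TermCodeSystemVersion here...
      for (Object nextValueSet : theValueSets) {
         createOrUpdateValueSet(nextValueSet);
      }
   }
}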

View File

@ -20,23 +20,107 @@ package ca.uhn.fhir.jpa.term;
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
public class HapiTerminologySvcDstu2 extends HapiTerminologySvcImpl {
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
@Autowired
private IValidationSupport myValidationSupport;
private void addAllChildren(String theSystemString, org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
if (isNotBlank(theCode.getCode())) {
theListToPopulate.add(new VersionIndependentConcept(theSystemString, theCode.getCode()));
}
for (org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent nextChild : theCode.getConcept()) {
addAllChildren(theSystemString, nextChild, theListToPopulate);
}
}
private boolean addTreeIfItContainsCode(String theSystemString, org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent theNext, String theCode, List<VersionIndependentConcept> theListToPopulate) {
boolean foundCodeInChild = false;
for (org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent nextChild : theNext.getConcept()) {
foundCodeInChild |= addTreeIfItContainsCode(theSystemString, nextChild, theCode, theListToPopulate);
}
if (theCode.equals(theNext.getCode()) || foundCodeInChild) {
theListToPopulate.add(new VersionIndependentConcept(theSystemString, theNext.getCode()));
return true;
}
return false;
}
@Override
protected IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) {
throw new UnsupportedOperationException();
}
@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
throw new UnsupportedOperationException();
}
private void findCodesAbove(org.hl7.fhir.instance.model.ValueSet theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent> conceptList = theSystem.getCodeSystem().getConcept();
for (org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent next : conceptList) {
addTreeIfItContainsCode(theSystemString, next, theCode, theListToPopulate);
}
}
@Override
public List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
org.hl7.fhir.instance.model.ValueSet system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesAbove(system, theSystem, theCode, retVal);
}
return retVal;
}
private void findCodesBelow(org.hl7.fhir.instance.model.ValueSet theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent> conceptList = theSystem.getCodeSystem().getConcept();
findCodesBelow(theSystemString, theCode, theListToPopulate, conceptList);
}
private void findCodesBelow(String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate, List<org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent> conceptList) {
for (org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent next : conceptList) {
if (theCode.equals(next.getCode())) {
addAllChildren(theSystemString, next, theListToPopulate);
} else {
findCodesBelow(theSystemString, theCode, theListToPopulate, next.getConcept());
}
}
}
@Override
public List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
org.hl7.fhir.instance.model.ValueSet system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesBelow(system, theSystem, theCode, retVal);
}
return retVal;
}
@Override
public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
throw new UnsupportedOperationException();
}
@Override
protected void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails) {
throw new UnsupportedOperationException();
}
}

View File

@ -1,44 +1,34 @@
package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet.*;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
@ -61,9 +51,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/
public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTerminologySvcDstu3 {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcDstu3.class);
private final VersionConvertor_30_40 myConverter;
public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implements IValidationSupport, IHapiTerminologySvcDstu3 {
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@ -71,10 +59,11 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
protected FhirContext myContext;
@Autowired
protected ITermCodeSystemDao myCodeSystemDao;
@Autowired
@Qualifier("myValueSetDaoDstu3")
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
@Autowired
private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemResourceDao;
@Autowired
private IValidationSupport myValidationSupport;
@Autowired
@ -84,19 +73,22 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
* Constructor
*/
public HapiTerminologySvcDstu3() {
myConverter = new VersionConvertor_30_40();
super();
}
@Override
public List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesBelow(system, theSystem, theCode, retVal);
protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet, RequestDetails theRequestDetails) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
ValueSet valueSetDstu3;
try {
valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
return retVal;
myValueSetResourceDao.update(valueSetDstu3, matchUrl, theRequestDetails);
}
private void addAllChildren(String theSystemString, ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
if (isNotBlank(theCode.getCode())) {
theListToPopulate.add(new VersionIndependentConcept(theSystemString, theCode.getCode()));
@ -106,31 +98,6 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
}
}
private void addCodeIfNotAlreadyAdded(String system, ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
if (addedCodes.add(nextConcept.getCode())) {
ValueSetExpansionContainsComponent contains = retVal.addContains();
contains.setCode(nextConcept.getCode());
contains.setSystem(system);
contains.setDisplay(nextConcept.getDisplay());
}
}
private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
}
private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myDisplay").boostedTo(4.0f)
.andField("myDisplayEdgeNGram").boostedTo(2.0f)
// .andField("myDisplayNGram").boostedTo(1.0f)
// .andField("myDisplayPhonetic").boostedTo(0.5f)
.sentence(nextFilter.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
}
private boolean addTreeIfItContainsCode(String theSystemString, ConceptDefinitionComponent theNext, String theCode, List<VersionIndependentConcept> theListToPopulate) {
boolean foundCodeInChild = false;
for (ConceptDefinitionComponent nextChild : theNext.getConcept()) {
@ -146,116 +113,31 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
}
@Override
public List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesAbove(system, theSystem, theCode, retVal);
protected IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
CodeSystem resourceToStore;
try {
resourceToStore = VersionConvertor_30_40.convertCodeSystem(theCodeSystemResource);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
return retVal;
return myCodeSystemResourceDao.update(resourceToStore, matchUrl, theRequestDetails).getId();
}
@Override
public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
String system = theInclude.getSystem();
ourLog.info("Starting expansion around code system: {}", system);
ValueSet valueSetToExpand = new ValueSet();
valueSetToExpand.getCompose().addInclude(theInclude);
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
TermCodeSystemVersion csv = cs.getCurrentVersion();
ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();
Set<String> addedCodes = new HashSet<>();
boolean haveIncludeCriteria = false;
/*
* Include Concepts
*/
for (ConceptReferenceComponent next : theInclude.getConcept()) {
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
haveIncludeCriteria = true;
TermConcept code = myTerminologySvc.findCode(system, nextCode);
if (code != null) {
addedCodes.add(nextCode);
ValueSetExpansionContainsComponent contains = retVal.addContains();
contains.setCode(nextCode);
contains.setSystem(system);
contains.setDisplay(code.getDisplay());
try {
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = super.expandValueSet(valueSetToExpandR4).getExpansion();
return VersionConvertor_30_40.convertValueSetExpansionComponent(expandedR4);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
}
}
/*
* Filters
*/
if (theInclude.getFilter().size() > 0) {
haveIncludeCriteria = true;
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
for (ConceptSetFilterComponent nextFilter : theInclude.getFilter()) {
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
continue;
}
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
}
if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == FilterOperator.EQUAL) {
addDisplayFilterExact(qb, bool, nextFilter);
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == FilterOperator.EQUAL) {
if (nextFilter.getValue().trim().contains(" ")) {
addDisplayFilterExact(qb, bool, nextFilter);
} else {
addDisplayFilterInexact(qb, bool, nextFilter);
}
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == FilterOperator.ISA) {
TermConcept code = myTerminologySvc.findCode(system, nextFilter.getValue());
if (code == null) {
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
}
ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
} else {
throw new InvalidRequestException("Unknown filter property[" + nextFilter + "] + op[" + nextFilter.getOpElement().getValueAsString() + "]");
}
}
Query luceneQuery = bool.createQuery();
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
jpaQuery.setMaxResults(1000);
StopWatch sw = new StopWatch();
@SuppressWarnings("unchecked")
List<TermConcept> result = jpaQuery.getResultList();
ourLog.info("Expansion completed in {}ms", sw.getMillis());
for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}
retVal.setTotal(jpaQuery.getResultSize());
}
if (!haveIncludeCriteria) {
List<TermConcept> allCodes = myTerminologySvc.findCodes(system);
for (TermConcept nextConcept : allCodes) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}
}
return retVal;
}
@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
@ -278,29 +160,6 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
return null;
}
// @Override
// public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
// ValueSet source = new ValueSet();
// source.getCompose().addInclude().addValueSet(theValueSet);
// try {
// ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
//
// HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
// ValueSetExpansionOutcome outcome = worker.expand(source, null);
// for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
// retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
// }
//
// return retVal;
//
// } catch (BaseServerResponseException e) {
// throw e;
// } catch (Exception e) {
// throw new InternalErrorException(e);
// }
//
// }
@CoverageIgnore
@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
@ -314,6 +173,16 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
}
}
@Override
public List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesAbove(system, theSystem, theCode, retVal);
}
return retVal;
}
private void findCodesBelow(CodeSystem theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<ConceptDefinitionComponent> conceptList = theSystem.getConcept();
findCodesBelow(theSystemString, theCode, theListToPopulate, conceptList);
@ -330,34 +199,18 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
}
@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myTerminologySvc.supportsSystem(theSystem);
public List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesBelow(system, theSystem, theCode, retVal);
}
return retVal;
}
@Override
@org.springframework.transaction.annotation.Transactional(propagation = Propagation.REQUIRED)
public void storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
CodeSystem cs = new org.hl7.fhir.dstu3.model.CodeSystem();
cs.setUrl(theCodeSystemResource.getUrl());
cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
DaoMethodOutcome createOutcome = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()), theRequestDetails);
IIdType csId = createOutcome.getId().toUnqualifiedVersionless();
if (createOutcome.getCreated() != Boolean.TRUE) {
CodeSystem existing = myCodeSystemResourceDao.read(csId, theRequestDetails);
csId = myCodeSystemResourceDao.update(existing, null, false, true, theRequestDetails).getId();
ourLog.info("Created new version of CodeSystem, got ID: {}", csId.toUnqualified().getValue());
}
ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
Long codeSystemResourcePid = resource.getId();
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
theCodeSystemVersion.setResource(resource);
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemVersion);
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myTerminologySvc.supportsSystem(theSystem);
}
@CoverageIgnore
@ -371,7 +224,8 @@ public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTermino
return new CodeValidationResult(def);
}
return new CodeValidationResult(IssueSeverity.ERROR, "Unkonwn code {" + theCodeSystem + "}" + theCode);
return new CodeValidationResult(IssueSeverity.ERROR, "Unknown code {" + theCodeSystem + "}" + theCode);
}
}

View File

@ -3,21 +3,10 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.Validate;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
@ -25,19 +14,19 @@ import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.model.ValueSet.*;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
@ -60,8 +49,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/
public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcR4.class);
public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements IHapiTerminologySvcR4 {
@Autowired
protected ITermCodeSystemDao myCodeSystemDao;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
@ -70,6 +58,9 @@ public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
@Qualifier("myCodeSystemDaoR4")
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
@Autowired
@Qualifier("myValueSetDaoR4")
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
@Autowired
private IValidationSupport myValidationSupport;
@Autowired
private IHapiTerminologySvc myTerminologySvc;
@ -85,30 +76,6 @@ public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
}
}
private void addCodeIfNotAlreadyAdded(String system, ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
if (addedCodes.add(nextConcept.getCode())) {
ValueSetExpansionContainsComponent contains = retVal.addContains();
contains.setCode(nextConcept.getCode());
contains.setSystem(system);
contains.setDisplay(nextConcept.getDisplay());
}
}
private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
}
private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myDisplay").boostedTo(4.0f)
.andField("myDisplayEdgeNGram").boostedTo(2.0f)
// .andField("myDisplayNGram").boostedTo(1.0f)
// .andField("myDisplayPhonetic").boostedTo(0.5f)
.sentence(nextFilter.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
}
private boolean addTreeIfItContainsCode(String theSystemString, ConceptDefinitionComponent theNext, String theCode, List<VersionIndependentConcept> theListToPopulate) {
boolean foundCodeInChild = false;
@ -124,130 +91,24 @@ public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
return false;
}
@Override
protected IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl, theRequestDetails).getId();
}
@Override
protected void createOrUpdateValueSet(ValueSet theValueSet, RequestDetails theRequestDetails) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
myValueSetResourceDao.update(theValueSet, matchUrl, theRequestDetails);
}
@Override
public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
String system = theInclude.getSystem();
ourLog.info("Starting expansion around code system: {}", system);
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
TermCodeSystemVersion csv = cs.getCurrentVersion();
ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();
Set<String> addedCodes = new HashSet<>();
boolean haveIncludeCriteria = false;
/*
* Include Concepts
*/
for (ConceptReferenceComponent next : theInclude.getConcept()) {
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
haveIncludeCriteria = true;
TermConcept code = myTerminologySvc.findCode(system, nextCode);
if (code != null) {
addedCodes.add(nextCode);
ValueSetExpansionContainsComponent contains = retVal.addContains();
contains.setCode(nextCode);
contains.setSystem(system);
contains.setDisplay(code.getDisplay());
ValueSet valueSetToExpand = new ValueSet();
valueSetToExpand.getCompose().addInclude(theInclude);
return super.expandValueSet(valueSetToExpand).getExpansion();
}
}
}
/*
* Filters
*/
if (theInclude.getFilter().size() > 0) {
haveIncludeCriteria = true;
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
for (ConceptSetFilterComponent nextFilter : theInclude.getFilter()) {
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
continue;
}
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
}
if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == FilterOperator.EQUAL) {
addDisplayFilterExact(qb, bool, nextFilter);
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == FilterOperator.EQUAL) {
if (nextFilter.getValue().trim().contains(" ")) {
addDisplayFilterExact(qb, bool, nextFilter);
} else {
addDisplayFilterInexact(qb, bool, nextFilter);
}
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == FilterOperator.ISA) {
TermConcept code = myTerminologySvc.findCode(system, nextFilter.getValue());
if (code == null) {
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
}
ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
} else {
throw new InvalidRequestException("Unknown filter property[" + nextFilter + "] + op[" + nextFilter.getOpElement().getValueAsString() + "]");
}
}
Query luceneQuery = bool.createQuery();
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
jpaQuery.setMaxResults(1000);
StopWatch sw = new StopWatch();
@SuppressWarnings("unchecked")
List<TermConcept> result = jpaQuery.getResultList();
ourLog.info("Expansion completed in {}ms", sw.getMillis());
for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}
retVal.setTotal(jpaQuery.getResultSize());
}
if (!haveIncludeCriteria) {
List<TermConcept> allCodes = myTerminologySvc.findCodes(system);
for (TermConcept nextConcept : allCodes) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}
}
return retVal;
}
// @Override
// public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
// ValueSet source = new ValueSet();
// source.getCompose().addInclude().addValueSet(theValueSet);
// try {
// ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
//
// HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
// ValueSetExpansionOutcome outcome = worker.expand(source, null);
// for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
// retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
// }
//
// return retVal;
//
// } catch (BaseServerResponseException e) {
// throw e;
// } catch (Exception e) {
// throw new InternalErrorException(e);
// }
//
// }
@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
@ -323,23 +184,6 @@ public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
return myTerminologySvc.supportsSystem(theSystem);
}
@Override
@Transactional(propagation = Propagation.REQUIRED)
public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
IIdType csId = myCodeSystemResourceDao.update(theCodeSystemResource, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()), theRequestDetails).getId();
ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
Long codeSystemResourcePid = resource.getId();
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
theCodeSystemVersion.setResource(resource);
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemVersion);
}
@CoverageIgnore
@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
@ -60,4 +61,10 @@ public interface IHapiTerminologySvc {
boolean supportsSystem(String theCodeSystem);
}
List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode);
List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode);
void storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets);
}

View File

@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
public interface IHapiTerminologySvcDstu3 extends IValidationSupport, IVersionSpecificValidationSupport {
public interface IHapiTerminologySvcDstu3 extends IHapiTerminologySvc, IValidationSupport {
// nothing
}

View File

@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
public interface IHapiTerminologySvcR4 extends IValidationSupport {
public interface IHapiTerminologySvcR4 extends IHapiTerminologySvc, IValidationSupport {
// nothing
}

View File

@ -1,18 +0,0 @@
package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
public interface IVersionSpecificValidationSupport {
List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode);
List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode);
void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets);
}

View File

@ -4,9 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler;
import ca.uhn.fhir.jpa.term.loinc.LoincHandler;
import ca.uhn.fhir.jpa.term.loinc.LoincHierarchyHandler;
import ca.uhn.fhir.jpa.term.loinc.*;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
@ -62,6 +60,9 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
public static final String LOINC_PART_FILE = "Part_Beta_1.csv";
public static final String LOINC_PART_LINK_FILE = "LoincPartLink_Beta_1.csv";
public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping_Beta_1.csv";
public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
@ -193,7 +194,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
CodeSystem loincCs;
try {
String loincCsString = IOUtils.toString(HapiTerminologySvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8);
String loincCsString = IOUtils.toString(BaseHapiTerminologySvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8);
loincCs = FhirContext.forR4().newXmlParser().parseResource(CodeSystem.class, loincCsString);
} catch (IOException e) {
throw new InternalErrorException("Failed to load loinc.xml", e);
@ -220,6 +221,18 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, propertyNames, valueSets);
iterateOverZipFile(theZipBytes, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);
// Answer list links (connects loinc observation codes to answerlist codes)
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
iterateOverZipFile(theZipBytes, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
// Part file
handler = new LoincPartHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theZipBytes, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC);
// Part link file
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theZipBytes, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
theZipBytes.clear();
for (Entry<String, TermConcept> next : code2concept.entrySet()) {

View File

@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincAnswerListLinkHandler implements IRecordHandler {
private final Map<String, TermConcept> myCode2Concept;
private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
public LoincAnswerListLinkHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets) {
myCode2Concept = theCode2concept;
for (ValueSet next : theValueSets) {
myIdToValueSet.put(next.getId(), next);
}
}
@Override
public void accept(CSVRecord theRecord) {
String applicableContext = trim(theRecord.get("ApplicableContext"));
/*
* Per Dan V's Notes:
*
* Note: in our current format, we support binding of the same
* LOINC term to different answer lists depending on the panel
* context. I dont believe theres a way to handle that in
* the current FHIR spec, so I might suggest we discuss either
* only binding the default (non-context specific) list or
* if multiple bindings could be supported.
*/
if (isNotBlank(applicableContext)) {
return;
}
String answerListId = trim(theRecord.get("AnswerListId"));
if (isBlank(answerListId)) {
return;
}
String loincNumber = trim(theRecord.get("LoincNumber"));
if (isBlank(loincNumber)) {
return;
}
TermConcept loincCode = myCode2Concept.get(loincNumber);
if (loincCode != null) {
loincCode.addProperty("answer-list", answerListId);
}
TermConcept answerListCode = myCode2Concept.get(answerListId);
if (answerListCode != null) {
answerListCode.addProperty("answers-for", loincNumber);
}
}
}

View File

@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.HashMap;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincPartHandler implements IRecordHandler {
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
public LoincPartHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
myCodeSystemVersion = theCodeSystemVersion;
myCode2Concept = theCode2concept;
}
@Override
public void accept(CSVRecord theRecord) {
// this is the code for the list (will repeat)
String partNumber = trim(theRecord.get("PartNumber"));
String partTypeName = trim(theRecord.get("PartTypeName"));
String partName = trim(theRecord.get("PartName"));
String partDisplayName = trim(theRecord.get("PartDisplayName"));
String status = trim(theRecord.get("Status"));
if (!"ACTIVE".equals(status)) {
return;
}
TermConcept concept = new TermConcept(myCodeSystemVersion, partNumber);
concept.setDisplay(partName);
// Key by PartNumber so that LoincPartLinkHandler can resolve part concepts by code
myCode2Concept.put(partNumber, concept);
}
}

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincPartLinkHandler implements IRecordHandler {
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
public LoincPartLinkHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
myCodeSystemVersion = theCodeSystemVersion;
myCode2Concept = theCode2concept;
}
@Override
public void accept(CSVRecord theRecord) {
String loincNumber = trim(theRecord.get("LoincNumber"));
String longCommonName = trim(theRecord.get("LongCommonName"));
String partNumber = trim(theRecord.get("PartNumber"));
String partDisplayName = trim(theRecord.get("PartDisplayName"));
String status = trim(theRecord.get("Status"));
TermConcept loincConcept = myCode2Concept.get(loincNumber);
TermConcept partConcept = myCode2Concept.get(partNumber);
if (loincConcept == null) {
ourLog.warn("No loinc code: {}", loincNumber);
return;
}
if (partConcept == null) {
ourLog.warn("No part code: {}", partNumber);
return;
}
// TODO: decide which property should carry the link between the LOINC code and
// this part; nothing is stored for the link yet.
}
private static final Logger ourLog = LoggerFactory.getLogger(LoincPartLinkHandler.class);
}

View File

@ -4,7 +4,7 @@ import static org.junit.Assert.assertNotEquals;
import java.nio.charset.StandardCharsets;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.junit.AfterClass;
@ -18,13 +18,13 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Test
public void testIndexContained() throws Exception {
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
String input = IOUtils.toString(getClass().getResource("/dstu3_codesystem_complete.json"), StandardCharsets.UTF_8);
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);

View File

@ -9,7 +9,7 @@ import static org.junit.Assert.fail;
import java.util.*;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceCategory;
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceClinicalStatus;
@ -48,7 +48,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
public void after() {
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Before
@ -483,7 +483,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
@Test
public void testExpandWithIsAInExternalValueSetReindex() {
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
createExternalCsAndLocalVs();

View File

@ -4,12 +4,12 @@ import static org.junit.Assert.assertNotEquals;
import java.nio.charset.StandardCharsets;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.r4.model.CodeSystem;
import org.junit.AfterClass;
import org.junit.Test;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
import ca.uhn.fhir.util.TestUtil;
public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
@ -18,13 +18,13 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Test
public void testIndexContained() throws Exception {
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
String input = IOUtils.toString(getClass().getResource("/r4/codesystem_complete.json"), StandardCharsets.UTF_8);
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);

View File

@ -9,7 +9,7 @@ import static org.junit.Assert.fail;
import java.util.*;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.AllergyIntolerance.AllergyIntoleranceCategory;
import org.hl7.fhir.r4.model.AllergyIntolerance.AllergyIntoleranceClinicalStatus;
@ -48,7 +48,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
public void after() {
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Before
@ -483,7 +483,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
@Test
public void testExpandWithIsAInExternalValueSetReindex() {
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
createExternalCsAndLocalVs();

View File

@ -28,6 +28,7 @@ import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
@ -88,6 +89,7 @@ public class TerminologyLoaderSvcLoincTest {
addFile("/loinc/", "loinc.csv", TerminologyLoaderSvc.LOINC_FILE);
addFile("/loinc/", "hierarchy.csv", TerminologyLoaderSvc.LOINC_HIERARCHY_FILE);
addFile("/loinc/", "AnswerList_Beta_1.csv", TerminologyLoaderSvc.LOINC_ANSWERLIST_FILE);
addFile("/loinc/", "LoincAnswerListLink_Beta_1.csv", TerminologyLoaderSvc.LOINC_ANSWERLIST_LINK_FILE);
// Actually do the load
mySvc.loadLoinc(myFiles, details);
@ -108,6 +110,10 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals("Pt", code.getProperty("TIME_ASPCT"));
assertEquals("R' wave amplitude in lead I", code.getDisplay());
// Loinc code with answer
code = concepts.get("61438-8");
assertThat(code.getProperties("answer-list"), contains("LL1000-0"));
// Answer list
code = concepts.get("LL1001-8");
assertEquals("LL1001-8", code.getCode());
@ -119,6 +125,11 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals("1-2 times per week", code.getDisplay());
assertEquals(3, code.getSequence().intValue());
// Answer list code with link to answers-for
code = concepts.get("LL1000-0");
assertThat(code.getProperties("answers-for"), contains("61438-8"));
// AnswerList valueSet
Map<String, ValueSet> valueSets = new HashMap<>();
for (ValueSet next : myValueSetsCaptor.getValue()) {
@ -130,11 +141,16 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals("PhenX05_14_30D freq amts", vs.getName());
assertEquals("urn:oid:1.3.6.1.4.1.12009.10.1.166", vs.getUrl());
assertEquals(1, vs.getCompose().getInclude().size());
assertEquals(6, vs.getCompose().getInclude().get(0).getConcept().size());
assertEquals(7, vs.getCompose().getInclude().get(0).getConcept().size());
assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, vs.getCompose().getInclude().get(0).getSystem());
assertEquals("LA6270-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
assertEquals("Never", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());
// Part
code = concepts.get("LP101394-7");
assertEquals("LP101394-7", code.getCode());
assertEquals("adjusted for maternal weight", code.getDisplay());
}

View File

@ -1,4 +1,4 @@
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink","AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName","ExtCodeSystem","ExtCodeSystemVersion","ExtCodeSystemCopyrightNotice","SubsequentTextPrompt","Description","Score"
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink","AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName" ,"ExtCodeSystem" ,"ExtCodeSystemVersion" ,"ExtCodeSystemCopyrightNotice" ,"SubsequentTextPrompt","Description","Score"
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , ,
@ -8,3 +8,4 @@
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA4389-8" ,"97" , ,11 ,"Refused" ,"443390004","Refused (qualifier value)","http://snomed.info/sct","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College", , ,

View File

@ -1,4 +1,5 @@
"LoincNumber","LongCommonName" ,"AnswerListId","AnswerListName" ,"AnswerListLinkType","ApplicableContext"
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]","LL1000-0" ,"PhenX05_13_30D bread amt","NORMATIVE" ,
"10061-0" ,"S' wave amplitude in lead I" ,"LL1311-1" ,"PhenX12_44" ,"EXAMPLE" ,
"10331-7" ,"Rh [Type] in Blood" ,"LL360-9" ,"Pos|Neg" ,"EXAMPLE" ,
"10389-5" ,"Blood product.other [Type]" ,"LL2413-4" ,"Othr bld prod" ,"EXAMPLE" ,
@ -8,4 +9,3 @@
"10401-8" ,"Immune serum globulin given [Type]" ,"LL2421-7" ,"IM/IV" ,"EXAMPLE" ,
"10410-9" ,"Plasma given [Type]" ,"LL2417-5" ,"Plasma type" ,"EXAMPLE" ,
"10568-4" ,"Clarity of Semen" ,"LL2427-4" ,"Clear/Opales/Milky" ,"EXAMPLE" ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]","LL1000-0" ,"PhenX05_13_30D bread amt","NORMATIVE" ,

View File

@ -0,0 +1,10 @@
"LoincNumber","LongCommonName","PartNumber","PartName","PartTypeName","LinkTypeName"
"10000-8","R wave duration in lead AVR","LP31088-5","R wave duration.lead AVR","COMPONENT","Primary"
"10000-8","R wave duration in lead AVR","LP6244-0","EKG","METHOD","Primary"
"10000-8","R wave duration in lead AVR","LP6879-3","Time","PROPERTY","Primary"
"10000-8","R wave duration in lead AVR","LP6960-1","Pt","TIME","Primary"
"10000-8","R wave duration in lead AVR","LP7289-4","Heart","SYSTEM","Primary"
"10000-8","R wave duration in lead AVR","LP7753-9","Qn","SCALE","Primary"
"10000-8","R wave duration in lead AVR","LP7795-0","EKG.MEAS","CLASS","Primary"
"10000-8","R wave duration in lead AVR","LP14259-3","Lead","COMPONENT","Search"
"10000-8","R wave duration in lead AVR","LP14744-4","Duration","COMPONENT","Search"

View File

@ -0,0 +1,9 @@
"PartNumber","PartTypeName","PartName","PartDisplayName","Status"
"LP101394-7","ADJUSTMENT","adjusted for maternal weight","adjusted for maternal weight","ACTIVE"
"LP101907-6","ADJUSTMENT","corrected for age","corrected for age","ACTIVE"
"LP115711-6","ADJUSTMENT","corrected for background","corrected for background","ACTIVE"
"LP147359-6","ADJUSTMENT","adjusted for body weight","adjusted for body weight","ACTIVE"
"LP173482-3","ADJUSTMENT","1st specimen","1st specimen","DEPRECATED"
"LP173483-1","ADJUSTMENT","post cyanocobalamin",,"ACTIVE"
"LP173484-9","ADJUSTMENT","W hyperextension)",,"ACTIVE"
"LP6244-0","METHOD","EKG","Electrocardiogram (EKG)","ACTIVE"

View File

@ -9,3 +9,4 @@
"10019-8" ,"R' wave amplitude.lead V4" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage" ,"R' wave Amp L-V4" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V4" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10020-6" ,"R' wave amplitude.lead V5" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage" ,"R' wave Amp L-V5" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V5" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30D","Find" ,"Pt" ,"^Patient" ,"Ord" ,"PhenX" ,"PHENX" ,"PhenX" ,"2.44" ,"MIN" , ,"TRIAL" , ,2 , , , ,"Each time you eat bread, toast or dinner rolls, how much do you usually eat?","PhenX.050201100100","N" , ,"Finding; Findings; How much bread in 30D; Last; Ordinal; Point in time; QL; Qual; Qualitative; Random; Screen" ,"How much bread in 30D PhenX", , , , , ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]", , , , , , , ,0 ,0 ,0 , , , , ,
"10000-8" ,"R wave duration.lead AVR" ,"Time" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; Durat; ECG; EKG.MEASUREMENTS; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave dur L-AVR; R wave dur L-AVR; Random; Right" ,"R wave dur L-AVR" ,"Observation", , , ,"s" ,"R wave duration in lead AVR" , , ,"s" , , , , ,0 ,0 ,0 , , , , ,
