Lots of addition of loinc

commit 611ee457cb
parent e31aa760dd
@@ -0,0 +1,4 @@
Database migration:

ALTER TABLE TRM_CODESYSTEM_VER DROP COLUMN RES_VERSION_ID;
@@ -9,9 +9,9 @@ package ca.uhn.fhir.context.support;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -25,6 +25,17 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.List;

/**
* This interface is a version-independent representation of the
* various functions that can be provided by validation and terminology
* services.
* <p>
* Implementations are not required to implement all of the functions
* in this interface; in fact it is expected that most won't. Any
* methods which are not implemented may simply return <code>null</code>
* and calling code is expected to be able to handle this.
* </p>
*/
public interface IContextValidationSupport<EVS_IN, EVS_OUT, SDT, CST, CDCT, IST> {

/**

@@ -91,7 +102,7 @@ public interface IContextValidationSupport<EVS_IN, EVS_OUT, SDT, CST, CDCT, IST>
*/
CodeValidationResult<CDCT, IST> validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay);

public class CodeValidationResult<CDCT, IST> {
class CodeValidationResult<CDCT, IST> {
private CDCT definition;
private String message;
private IST severity;

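Illustrative usage (not part of this commit): per the Javadoc above, callers of IContextValidationSupport must tolerate null results from methods an implementation chooses not to provide. The validationSupport variable and the LOINC code below are hypothetical.

    IContextValidationSupport.CodeValidationResult<?, ?> outcome =
          validationSupport.validateCode(fhirContext, "http://loinc.org", "718-7", null);
    if (outcome == null) {
       // this implementation does not validate codes - treat as "no information", not as a failure
    }
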
@@ -91,7 +91,7 @@ ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number pr
ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation

ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!

@@ -10,10 +10,7 @@ import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamRegistryDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu3;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.*;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;

@@ -113,7 +110,12 @@ public class BaseDstu3Config extends BaseConfig {
}

@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvcDstu3 terminologyService() {
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcImpl();
}

@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvcDstu3 terminologyServiceDstu3() {
return new HapiTerminologySvcDstu3();
}

@@ -122,7 +122,7 @@ public class BaseR4Config extends BaseConfig {
}

@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvcR4 terminologyService() {
public IHapiTerminologySvcR4 terminologyServiceR4() {
return new HapiTerminologySvcR4();
}

@@ -232,7 +232,6 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3<CodeSys

persCs = new TermCodeSystemVersion();
persCs.setResource(retVal);
persCs.setResourceVersionId(retVal.getVersion());
persCs.getConcepts().addAll(toPersistedConcepts(cs.getConcept(), persCs));
ourLog.info("Code system has {} concepts", persCs.getConcepts().size());
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, persCs);

@@ -95,7 +95,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem) {
List<IIdType> valueSetIds;
Set<Long> ids = searchForIds(new SearchParameterMap(CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)));
valueSetIds = new ArrayList<IIdType>();
valueSetIds = new ArrayList<>();
for (Long next : ids) {
valueSetIds.add(new IdType("CodeSystem", next));
}

@@ -190,7 +190,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i
}

private List<TermConcept> toPersistedConcepts(List<ConceptDefinitionComponent> theConcept, TermCodeSystemVersion theCodeSystemVersion) {
ArrayList<TermConcept> retVal = new ArrayList<TermConcept>();
ArrayList<TermConcept> retVal = new ArrayList<>();

for (ConceptDefinitionComponent next : theConcept) {
if (isNotBlank(next.getCode())) {

@@ -226,7 +226,6 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i

persCs = new TermCodeSystemVersion();
persCs.setResource(retVal);
persCs.setResourceVersionId(retVal.getVersion());
persCs.getConcepts().addAll(toPersistedConcepts(cs.getConcept(), persCs));
ourLog.info("Code system has {} concepts", persCs.getConcepts().size());
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, persCs);

@@ -62,8 +62,8 @@ public class TermCodeSystemVersion implements Serializable {
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey=@ForeignKey(name="FK_CODESYSVER_RES_ID"))
private ResourceTable myResource;

@Column(name = "RES_VERSION_ID", nullable = false, updatable = false)
private Long myResourceVersionId;
@Column(name = "CS_VERSION_ID", nullable = true, updatable = false)
private String myCodeSystemVersionId;

/**
* Constructor

@@ -74,7 +74,7 @@ public class TermCodeSystemVersion implements Serializable {

public Collection<TermConcept> getConcepts() {
if (myConcepts == null) {
myConcepts = new ArrayList<TermConcept>();
myConcepts = new ArrayList<>();
}
return myConcepts;
}

@@ -88,7 +88,7 @@ public class TermCodeSystemVersion implements Serializable {
final int prime = 31;
int result = 1;
result = prime * result + ((myResource.getId() == null) ? 0 : myResource.getId().hashCode());
result = prime * result + ((myResourceVersionId == null) ? 0 : myResourceVersionId.hashCode());
result = prime * result + ((myCodeSystemVersionId == null) ? 0 : myCodeSystemVersionId.hashCode());
return result;
}

@@ -111,11 +111,11 @@ public class TermCodeSystemVersion implements Serializable {
return false;
}

if (myResourceVersionId == null) {
if (other.myResourceVersionId != null) {
if (myCodeSystemVersionId == null) {
if (other.myCodeSystemVersionId != null) {
return false;
}
} else if (!myResourceVersionId.equals(other.myResourceVersionId)) {
} else if (!myCodeSystemVersionId.equals(other.myCodeSystemVersionId)) {
return false;
}
return true;

@@ -125,16 +125,16 @@ public class TermCodeSystemVersion implements Serializable {
return myResource;
}

public Long getResourceVersionId() {
return myResourceVersionId;
public String getCodeSystemVersionId() {
return myCodeSystemVersionId;
}

public void setResource(ResourceTable theResource) {
myResource = theResource;
}

public void setResourceVersionId(Long theResourceVersionId) {
myResourceVersionId = theResourceVersionId;
public void setCodeSystemVersionId(String theCodeSystemVersionId) {
myCodeSystemVersionId = theCodeSystemVersionId;
}

}

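Illustrative usage (not part of this commit): TermCodeSystemVersion now appears to track the code system's business version (CS_VERSION_ID, a String) rather than the resource version id. A sketch, with hypothetical resourceTable/codeSystem variables:

    TermCodeSystemVersion csv = new TermCodeSystemVersion();
    csv.setResource(resourceTable);                       // ResourceTable entity backing the CodeSystem resource
    csv.setCodeSystemVersionId(codeSystem.getVersion());  // e.g. a LOINC release string such as "2.63" (hypothetical)
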
@@ -1,5 +1,20 @@
package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.search.DeferConceptIndexingInterceptor;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.*;

import javax.annotation.Nonnull;
import javax.persistence.*;
import javax.persistence.Index;
import java.io.Serializable;
import java.util.*;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/*

@@ -11,9 +26,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -22,52 +37,12 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;

import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;

import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.search.DeferConceptIndexingInterceptor;

@Entity
@Indexed(interceptor=DeferConceptIndexingInterceptor.class)
@Table(name="TRM_CONCEPT", uniqueConstraints= {
@UniqueConstraint(name="IDX_CONCEPT_CS_CODE", columnNames= {"CODESYSTEM_PID", "CODE"})
}, indexes= {
@Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList="INDEX_STATUS")
@Indexed(interceptor = DeferConceptIndexingInterceptor.class)
@Table(name = "TRM_CONCEPT", uniqueConstraints = {
@UniqueConstraint(name = "IDX_CONCEPT_CS_CODE", columnNames = {"CODESYSTEM_PID", "CODE"})
}, indexes = {
@Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList = "INDEX_STATUS")
})
public class TermConcept implements Serializable {
private static final int MAX_DESC_LENGTH = 400;

@@ -75,11 +50,11 @@ public class TermConcept implements Serializable {
private static final long serialVersionUID = 1L;

@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade= {})
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {})
private Collection<TermConceptParentChildLink> myChildren;

@Column(name = "CODE", length = 100, nullable = false)
@Fields({ @Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")), })
@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
private String myCode;

@ManyToOne()

@@ -87,10 +62,10 @@ public class TermConcept implements Serializable {
private TermCodeSystemVersion myCodeSystem;

@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
@Fields({ @Field(name = "myCodeSystemVersionPid") })
@Fields({@Field(name = "myCodeSystemVersionPid")})
private long myCodeSystemVersionPid;

@Column(name="DISPLAY", length=MAX_DESC_LENGTH, nullable=true)
@Column(name = "DISPLAY", length = MAX_DESC_LENGTH, nullable = true)
@Fields({
@Field(name = "myDisplay", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
@Field(name = "myDisplayEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),

@@ -99,24 +74,25 @@ public class TermConcept implements Serializable {
})
private String myDisplay;

@OneToMany(mappedBy="myConcept")
@OneToMany(mappedBy = "myConcept")
@Field
@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
private Collection<TermConceptProperty> myProperties;

@Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PID")
@Column(name = "PID")
private Long myId;

@Column(name = "INDEX_STATUS", nullable = true)
private Long myIndexStatus;

@Transient
@Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer"))
private String myParentPids;

@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild")
private Collection<TermConceptParentChildLink> myParents;
@Column(name = "CODE_SEQUENCE", nullable = true)
private Integer mySequence;

public TermConcept() {
super();

@@ -145,6 +121,16 @@ public class TermConcept implements Serializable {
}
}

public void addProperty(@Nonnull String thePropertyName, @Nonnull String thePropertyValue) {
Validate.notBlank(thePropertyName);

TermConceptProperty property = new TermConceptProperty();
property.setConcept(this);
property.setKey(thePropertyName);
property.setValue(thePropertyValue);
getProperties().add(property);
}

@Override
public boolean equals(Object theObj) {
if (!(theObj instanceof TermConcept)) {

@@ -164,7 +150,7 @@ public class TermConcept implements Serializable {

public Collection<TermConceptParentChildLink> getChildren() {
if (myChildren == null) {
myChildren = new ArrayList<TermConceptParentChildLink>();
myChildren = new ArrayList<>();
}
return myChildren;
}

@@ -173,14 +159,37 @@ public class TermConcept implements Serializable {
return myCode;
}

public Integer getSequence() {
return mySequence;
}

public void setCode(String theCode) {
myCode = theCode;
}

public TermCodeSystemVersion getCodeSystem() {
return myCodeSystem;
}

public void setCodeSystem(TermCodeSystemVersion theCodeSystem) {
myCodeSystem = theCodeSystem;
if (theCodeSystem.getPid() != null) {
myCodeSystemVersionPid = theCodeSystem.getPid();
}
}

public String getDisplay() {
return myDisplay;
}

public TermConcept setDisplay(String theDisplay) {
myDisplay = theDisplay;
if (isNotBlank(theDisplay) && theDisplay.length() > MAX_DESC_LENGTH) {
myDisplay = myDisplay.substring(0, MAX_DESC_LENGTH);
}
return this;
}

public Long getId() {
return myId;
}

@@ -189,17 +198,37 @@ public class TermConcept implements Serializable {
return myIndexStatus;
}

public void setIndexStatus(Long theIndexStatus) {
myIndexStatus = theIndexStatus;
}

public String getParentPidsAsString() {
return myParentPids;
}

public Collection<TermConceptParentChildLink> getParents() {
if (myParents == null) {
myParents = new ArrayList<TermConceptParentChildLink>();
myParents = new ArrayList<>();
}
return myParents;
}

public Collection<TermConceptProperty> getProperties() {
if (myProperties == null) {
myProperties = new ArrayList<>();
}
return myProperties;
}

public String getProperty(String thePropertyName) {
for (TermConceptProperty next : getProperties()) {
if (thePropertyName.equals(next.getKey())) {
return next.getValue();
}
}
return null;
}

@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();

@@ -211,51 +240,31 @@ public class TermConcept implements Serializable {
private void parentPids(TermConcept theNextConcept, Set<Long> theParentPids) {
for (TermConceptParentChildLink nextParentLink : theNextConcept.getParents()) {
TermConcept parent = nextParentLink.getParent();
Long parentConceptId = parent.getId();
Validate.notNull(parentConceptId);
if (parent != null && theParentPids.add(parentConceptId)) {
parentPids(parent, theParentPids);
if (parent != null) {
Long parentConceptId = parent.getId();
Validate.notNull(parentConceptId);
if (theParentPids.add(parentConceptId)) {
parentPids(parent, theParentPids);
}
}
}
}

@SuppressWarnings("unused")
@PreUpdate
@PrePersist
public void prePersist() {
if (myParentPids == null) {
Set<Long> parentPids = new HashSet<Long>();
Set<Long> parentPids = new HashSet<>();
TermConcept entity = this;
parentPids(entity, parentPids);
entity.setParentPids(parentPids);

ourLog.trace("Code {}/{} has parents {}", entity.getId(), entity.getCode(), entity.getParentPidsAsString());
}
}

public void setCode(String theCode) {
myCode = theCode;
}

public void setCodeSystem(TermCodeSystemVersion theCodeSystem) {
myCodeSystem = theCodeSystem;
if (theCodeSystem.getPid() != null) {
myCodeSystemVersionPid = theCodeSystem.getPid();
}
}

public TermConcept setDisplay(String theDisplay) {
myDisplay = theDisplay;
if (isNotBlank(theDisplay) && theDisplay.length() > MAX_DESC_LENGTH) {
myDisplay = myDisplay.substring(0, MAX_DESC_LENGTH);
}
return this;
}

public void setIndexStatus(Long theIndexStatus) {
myIndexStatus = theIndexStatus;
}

public void setParentPids(Set<Long> theParentPids) {
private void setParentPids(Set<Long> theParentPids) {
StringBuilder b = new StringBuilder();
for (Long next : theParentPids) {
if (b.length() > 0) {

@@ -275,8 +284,13 @@ public class TermConcept implements Serializable {
myParentPids = theParentPids;
}

public void setSequence(Integer theSequence) {
mySequence = theSequence;
}

@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("code", myCode).append("display", myDisplay).build();
}

}

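Illustrative usage (not part of this commit): the new addProperty()/getProperty() API on TermConcept. The code, display, and property key/value below are hypothetical LOINC-style examples.

    TermConcept concept = new TermConcept();
    concept.setCode("718-7");
    concept.setDisplay("Hemoglobin [Mass/volume] in Blood");
    concept.addProperty("SCALE_TYP", "Qn");           // stored as a TermConceptProperty row
    String scale = concept.getProperty("SCALE_TYP");  // "Qn"
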
@@ -20,9 +20,11 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/

import java.io.Serializable;
import org.hibernate.search.annotations.Field;
import org.hibernate.validator.constraints.NotBlank;

import javax.persistence.*;
import java.io.Serializable;

@Entity
@Table(name = "TRM_CONCEPT_PROPERTY", uniqueConstraints = {

@@ -43,8 +45,9 @@ public class TermConceptProperty implements Serializable {
private Long myId;

@Column(name="PROP_KEY", length=200, nullable=false)
@NotBlank
private String myKey;

@Column(name="PROP_VAL", length=200, nullable=true)
private String myValue;

@@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.entity;

import org.apache.lucene.document.Document;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.StringBridge;

import java.util.Collection;

/**
* Allows hibernate search to index individual concepts' properties
*/
public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge {

public static final String PROP_PREFIX = "PROP__";

/**
* Constructor
*/
public TermConceptPropertyFieldBridge() {
super();
}

@Override
public String objectToString(Object theObject) {
return theObject.toString();
}

@Override
public void set(String theName, Object theValue, Document theDocument, LuceneOptions theLuceneOptions) {
Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theValue;
if (properties != null) {
for (TermConceptProperty next : properties) {
String propValue = next.getKey() + "=" + next.getValue();
theLuceneOptions.addFieldToDocument(theName, propValue, theDocument);
}
}
}
}

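Illustrative note (not part of this commit): the bridge indexes each property as a "KEY=VALUE" string on the concept's myProperties field, so an arbitrary property filter can later be matched with a Hibernate Search phrase query, as HapiTerminologySvcImpl does during ValueSet expansion. A sketch, assuming qb and bool are the QueryBuilder and BooleanJunction already in scope there; the property key and value are hypothetical:

    bool.must(qb.phrase()
          .onField("myProperties")
          .sentence("SCALE_TYP" + "=" + "Qn")
          .createQuery());
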
@@ -20,14 +20,12 @@ package ca.uhn.fhir.jpa.term;
* #L%
*/

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvc {
public class HapiTerminologySvcDstu2 extends HapiTerminologySvcImpl {

@Autowired
private IValidationSupport myValidationSupport;

@@ -39,10 +37,6 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvc {
}

@Override
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
// nothing yet
}

}

@@ -3,36 +3,39 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet.*;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.*;

import static org.apache.commons.lang3.StringUtils.isBlank;

@@ -47,9 +50,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -58,14 +61,50 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/

public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements IValidationSupport, IHapiTerminologySvcDstu3 {
public class HapiTerminologySvcDstu3 implements IValidationSupport, IHapiTerminologySvcDstu3 {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcDstu3.class);
private final VersionConvertor_30_40 myConverter;

@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired
protected FhirContext myContext;
@Autowired
protected ITermCodeSystemDao myCodeSystemDao;

@Autowired
private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemResourceDao;

@Autowired
private IValidationSupport myValidationSupport;
@Autowired
private IHapiTerminologySvc myTerminologySvc;

/**
* Constructor
*/
public HapiTerminologySvcDstu3() {
myConverter = new VersionConvertor_30_40();
}

@Override
public List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesBelow(system, theSystem, theCode, retVal);
}
return retVal;
}

private void addAllChildren(String theSystemString, ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
if (isNotBlank(theCode.getCode())) {
theListToPopulate.add(new VersionIndependentConcept(theSystemString, theCode.getCode()));
}
for (ConceptDefinitionComponent nextChild : theCode.getConcept()) {
addAllChildren(theSystemString, nextChild, theListToPopulate);
}
}

private void addCodeIfNotAlreadyAdded(String system, ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
if (addedCodes.add(nextConcept.getCode())) {

@@ -76,36 +115,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
}
}

@Override
protected List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesBelow(system, theSystem, theCode, retVal);
}
return retVal;
private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
}

private void findCodesBelow(CodeSystem theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<ConceptDefinitionComponent> conceptList = theSystem.getConcept();
findCodesBelow(theSystemString, theCode, theListToPopulate, conceptList);
}

private void findCodesBelow(String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate, List<ConceptDefinitionComponent> conceptList) {
for (ConceptDefinitionComponent next : conceptList) {
if (theCode.equals(next.getCode())) {
addAllChildren(theSystemString, next, theListToPopulate);
} else {
findCodesBelow(theSystemString, theCode, theListToPopulate, next.getConcept());
}
}
}

private void findCodesAbove(CodeSystem theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<ConceptDefinitionComponent> conceptList = theSystem.getConcept();
for (ConceptDefinitionComponent next : conceptList) {
addTreeIfItContainsCode(theSystemString, next, theCode, theListToPopulate);
}
private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myDisplay").boostedTo(4.0f)
.andField("myDisplayEdgeNGram").boostedTo(2.0f)
// .andField("myDisplayNGram").boostedTo(1.0f)
// .andField("myDisplayPhonetic").boostedTo(0.5f)
.sentence(nextFilter.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
}

private boolean addTreeIfItContainsCode(String theSystemString, ConceptDefinitionComponent theNext, String theCode, List<VersionIndependentConcept> theListToPopulate) {

@@ -122,18 +145,9 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
return false;
}

private void addAllChildren(String theSystemString, ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
if (isNotBlank(theCode.getCode())) {
theListToPopulate.add(new VersionIndependentConcept(theSystemString, theCode.getCode()));
}
for (ConceptDefinitionComponent nextChild : theCode.getConcept()) {
addAllChildren(theSystemString, nextChild, theListToPopulate);
}
}

@Override
protected List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
public List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
if (system != null) {
findCodesAbove(system, theSystem, theCode, retVal);

@@ -141,22 +155,6 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
return retVal;
}

private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
}

private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ConceptSetFilterComponent nextFilter) {
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myDisplay").boostedTo(4.0f)
.andField("myDisplayEdgeNGram").boostedTo(2.0f)
// .andField("myDisplayNGram").boostedTo(1.0f)
// .andField("myDisplayPhonetic").boostedTo(0.5f)
.sentence(nextFilter.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
}

@Override
public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
String system = theInclude.getSystem();

@@ -166,7 +164,7 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
TermCodeSystemVersion csv = cs.getCurrentVersion();

ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();
Set<String> addedCodes = new HashSet<String>();
Set<String> addedCodes = new HashSet<>();
boolean haveIncludeCriteria = false;

/*

@@ -176,7 +174,7 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
haveIncludeCriteria = true;
TermConcept code = super.findCode(system, nextCode);
TermConcept code = myTerminologySvc.findCode(system, nextCode);
if (code != null) {
addedCodes.add(nextCode);
ValueSetExpansionContainsComponent contains = retVal.addContains();

@@ -208,8 +206,8 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
}

if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == FilterOperator.EQUAL) {
addDisplayFilterExact(qb, bool, nextFilter);
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == FilterOperator.EQUAL) {

@@ -219,7 +217,7 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
addDisplayFilterInexact(qb, bool, nextFilter);
}
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == FilterOperator.ISA) {
TermConcept code = super.findCode(system, nextFilter.getValue());
TermConcept code = myTerminologySvc.findCode(system, nextFilter.getValue());
if (code == null) {
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
}

@@ -236,12 +234,12 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
jpaQuery.setMaxResults(1000);

StopWatch sw = new StopWatch();

@SuppressWarnings("unchecked")
List<TermConcept> result = jpaQuery.getResultList();

ourLog.info("Expansion completed in {}ms", sw.getMillis());

for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}

@@ -250,7 +248,7 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
}

if (!haveIncludeCriteria) {
List<TermConcept> allCodes = super.findCodes(system);
List<TermConcept> allCodes = myTerminologySvc.findCodes(system);
for (TermConcept nextConcept : allCodes) {
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
}

@@ -264,29 +262,6 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
return null;
}

@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
ValueSet source = new ValueSet();
source.getCompose().addInclude().addValueSet(theValueSet);
try {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();

HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
ValueSetExpansionOutcome outcome = worker.expand(source, null);
for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
}

return retVal;

} catch (BaseServerResponseException e) {
throw e;
} catch (Exception e) {
throw new InternalErrorException(e);
}

}

@Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return Collections.emptyList();

@@ -303,25 +278,70 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
return null;
}

// @Override
// public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
// ValueSet source = new ValueSet();
// source.getCompose().addInclude().addValueSet(theValueSet);
// try {
// ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
//
// HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
// ValueSetExpansionOutcome outcome = worker.expand(source, null);
// for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
// retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
// }
//
// return retVal;
//
// } catch (BaseServerResponseException e) {
// throw e;
// } catch (Exception e) {
// throw new InternalErrorException(e);
// }
//
// }

@CoverageIgnore
@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
return null;
}

@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return super.supportsSystem(theSystem);
private void findCodesAbove(CodeSystem theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<ConceptDefinitionComponent> conceptList = theSystem.getConcept();
for (ConceptDefinitionComponent next : conceptList) {
addTreeIfItContainsCode(theSystemString, next, theCode, theListToPopulate);
}
}

private void findCodesBelow(CodeSystem theSystem, String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate) {
List<ConceptDefinitionComponent> conceptList = theSystem.getConcept();
findCodesBelow(theSystemString, theCode, theListToPopulate, conceptList);
}

private void findCodesBelow(String theSystemString, String theCode, List<VersionIndependentConcept> theListToPopulate, List<ConceptDefinitionComponent> conceptList) {
for (ConceptDefinitionComponent next : conceptList) {
if (theCode.equals(next.getCode())) {
addAllChildren(theSystemString, next, theListToPopulate);
} else {
findCodesBelow(theSystemString, theCode, theListToPopulate, next.getConcept());
}
}
}

@Override
@Transactional(propagation = Propagation.REQUIRED)
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
CodeSystem cs = new org.hl7.fhir.dstu3.model.CodeSystem();
cs.setUrl(theSystem);
cs.setContent(CodeSystemContentMode.NOTPRESENT);
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myTerminologySvc.supportsSystem(theSystem);
}

DaoMethodOutcome createOutcome = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theSystem), theRequestDetails);
@Override
@org.springframework.transaction.annotation.Transactional(propagation = Propagation.REQUIRED)
public void storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
CodeSystem cs = new org.hl7.fhir.dstu3.model.CodeSystem();
cs.setUrl(theCodeSystemResource.getUrl());
cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);

DaoMethodOutcome createOutcome = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()), theRequestDetails);
IIdType csId = createOutcome.getId().toUnqualifiedVersionless();
if (createOutcome.getCreated() != Boolean.TRUE) {
CodeSystem existing = myCodeSystemResourceDao.read(csId, theRequestDetails);

@@ -336,15 +356,14 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc implements I
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());

theCodeSystemVersion.setResource(resource);
theCodeSystemVersion.setResourceVersionId(resource.getVersion());
super.storeNewCodeSystemVersion(codeSystemResourcePid, theSystem, theCodeSystemVersion);
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemVersion);

}

@CoverageIgnore
@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
TermConcept code = super.findCode(theCodeSystem, theCode);
TermConcept code = myTerminologySvc.findCode(theCodeSystem, theCode);
if (code != null) {
ConceptDefinitionComponent def = new ConceptDefinitionComponent();
def.setCode(code.getCode());

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -19,13 +19,32 @@ package ca.uhn.fhir.jpa.term;
* limitations under the License.
* #L%
*/
import java.util.*;
import java.util.concurrent.TimeUnit;

import javax.persistence.*;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.r4.model.ValueSet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;

@@ -37,71 +56,91 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.*;
import java.util.concurrent.TimeUnit;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.ValidateUtil;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
private static boolean ourForceSaveDeferredAlwaysForUnitTest;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiTerminologySvc.class);
public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcImpl.class);
private static final Object PLACEHOLDER_OBJECT = new Object();

private ArrayListMultimap<Long, Long> myChildToParentPidCache;

private static boolean ourForceSaveDeferredAlwaysForUnitTest;
@Autowired
protected ITermCodeSystemDao myCodeSystemDao;

@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;

@Autowired
protected ITermConceptDao myConceptDao;

private List<TermConceptParentChildLink> myConceptLinksToSaveLater = new ArrayList<TermConceptParentChildLink>();

@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;

private List<TermConcept> myConceptsToSaveLater = new ArrayList<TermConcept>();

@Autowired
protected FhirContext myContext;

@Autowired
private DaoConfig myDaoConfig;

@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
private ArrayListMultimap<Long, Long> myChildToParentPidCache;
@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
private List<TermConceptParentChildLink> myConceptLinksToSaveLater = new ArrayList<>();
@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
private List<TermConcept> myConceptsToSaveLater = new ArrayList<>();
@Autowired
private DaoConfig myDaoConfig;
private long myNextReindexPass;

private boolean myProcessDeferred = true;

@Autowired
private PlatformTransactionManager myTransactionMgr;
@Autowired
private IVersionSpecificValidationSupport myVersionSpecificValidationSupport;

private void addCodeIfNotAlreadyAdded(String system, ValueSet.ValueSetExpansionComponent retVal, Set<String> addedCodes, TermConcept nextConcept) {
if (addedCodes.add(nextConcept.getCode())) {
ValueSet.ValueSetExpansionContainsComponent contains = retVal.addContains();
contains.setCode(nextConcept.getCode());
contains.setSystem(system);
contains.setDisplay(nextConcept.getDisplay());
}
}

private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
}

private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myDisplay").boostedTo(4.0f)
.andField("myDisplayEdgeNGram").boostedTo(2.0f)
// .andField("myDisplayNGram").boostedTo(1.0f)
// .andField("myDisplayPhonetic").boostedTo(0.5f)
.sentence(nextFilter.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
}

private boolean addToSet(Set<TermConcept> theSetToPopulate, TermConcept theConcept) {
boolean retVal = theSetToPopulate.add(theConcept);
if (retVal) {
if (theSetToPopulate.size() >= myDaoConfig.getMaximumExpansionSize()) {
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
String msg = myContext.getLocalizer().getMessage(HapiTerminologySvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
throw new InvalidRequestException(msg);
}
}
return retVal;
}

@Override
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
myConceptDao.deleteByCodeSystemVersion(next.getPid());
}
myCodeSystemDao.delete(theCodeSystem.getPid());
}

private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
ourLog.trace("Checking {} parents", theParents.size());
int retVal = 0;

@ -121,6 +160,127 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public ValueSet expandValueSet(ValueSet theValueSetToExpand) {
|
||||
|
||||
ValueSet.ConceptSetComponent include = theValueSetToExpand.getCompose().getIncludeFirstRep();
|
||||
String system = include.getSystem();
|
||||
ourLog.info("Starting expansion around code system: {}", system);
|
||||
|
||||
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
|
||||
if (cs == null) {
|
||||
throw new InvalidRequestException("Unknown code system: " + system);
|
||||
}
|
||||
|
||||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
|
||||
ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
|
||||
Set<String> addedCodes = new HashSet<>();
|
||||
boolean haveIncludeCriteria = false;
|
||||
|
||||
/*
|
||||
* Include Concepts
|
||||
*/
|
||||
for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
|
||||
String nextCode = next.getCode();
|
||||
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
|
||||
haveIncludeCriteria = true;
|
||||
TermConcept code = findCode(system, nextCode);
|
||||
if (code != null) {
|
||||
addedCodes.add(nextCode);
|
||||
ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
|
||||
contains.setCode(nextCode);
|
||||
contains.setSystem(system);
|
||||
contains.setDisplay(code.getDisplay());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Filters
|
||||
*/
|
||||
|
||||
if (include.getFilter().size() > 0) {
|
||||
haveIncludeCriteria = true;
|
||||
|
||||
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
|
||||
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
|
||||
BooleanJunction<?> bool = qb.bool();
|
||||
|
||||
bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
|
||||
|
||||
for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
|
||||
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
|
||||
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
|
||||
}
|
||||
|
||||
|
||||
if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
|
||||
addDisplayFilterExact(qb, bool, nextFilter);
|
||||
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
|
||||
if (nextFilter.getValue().trim().contains(" ")) {
|
||||
addDisplayFilterExact(qb, bool, nextFilter);
|
||||
} else {
|
||||
addDisplayFilterInexact(qb, bool, nextFilter);
|
||||
}
|
||||
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
|
||||
|
||||
TermConcept code = findCode(system, nextFilter.getValue());
|
||||
if (code == null) {
|
||||
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
|
||||
}
|
||||
|
||||
ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
|
||||
bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
|
||||
|
||||
} else {
|
||||
|
||||
// bool.must(qb.keyword().onField("myProperties").matching(nextFilter.getProperty()+"="+nextFilter.getValue()).createQuery());
|
||||
bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty()+"="+nextFilter.getValue()).createQuery());
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
Query luceneQuery = bool.createQuery();
|
||||
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
|
||||
jpaQuery.setMaxResults(1000);
|
||||
|
||||
StopWatch sw = new StopWatch();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
List<TermConcept> result = jpaQuery.getResultList();
|
||||
|
||||
ourLog.info("Expansion completed in {}ms", sw.getMillis());
|
||||
|
||||
for (TermConcept nextConcept : result) {
|
||||
addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
|
||||
}
|
||||
|
||||
expansionComponent.setTotal(jpaQuery.getResultSize());
|
||||
}

if (!haveIncludeCriteria) {
List<TermConcept> allCodes = findCodes(system);
for (TermConcept nextConcept : allCodes) {
addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
}
}

ValueSet valueSet = new ValueSet();
valueSet.setExpansion(expansionComponent);
return valueSet;
}
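
For illustration only (not part of this change): a rough sketch of how a caller might drive the expansion above, assuming myTermSvc is the injected IHapiTerminologySvc. The code system URL and parent code are made-up values, and only the first compose include is consulted by the implementation above.

ValueSet toExpand = new ValueSet();
ValueSet.ConceptSetComponent include = toExpand.getCompose().addInclude();
include.setSystem("http://example.com/my-codesystem"); // hypothetical system URL

// Expand everything below a parent code via the "concept" is-a filter handled above
include.addFilter()
.setProperty("concept")
.setOp(ValueSet.FilterOperator.ISA)
.setValue("PARENT-CODE"); // hypothetical code

ValueSet expanded = myTermSvc.expandValueSet(toExpand);
for (ValueSet.ValueSetExpansionContainsComponent next : expanded.getExpansion().getContains()) {
ourLog.info("Expanded code {} ({})", next.getCode(), next.getDisplay());
}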

@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
throw new UnsupportedOperationException(); // FIXME implement
}
|
||||
|
||||
private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
|
||||
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
|
||||
TermConcept nextChild = nextChildLink.getChild();
|
||||
|
@ -145,6 +305,7 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public TermConcept findCode(String theCodeSystem, String theCode) {
|
||||
TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem);
|
||||
|
||||
|
@ -159,19 +320,19 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
@Override
|
||||
public Set<TermConcept> findCodesAbove(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {
|
||||
Stopwatch stopwatch = Stopwatch.createStarted();
|
||||
StopWatch stopwatch = new StopWatch();
|
||||
|
||||
TermConcept concept = fetchLoadedCode(theCodeSystemResourcePid, theCodeSystemVersionPid, theCode);
|
||||
if (concept == null) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
Set<TermConcept> retVal = new HashSet<TermConcept>();
|
||||
Set<TermConcept> retVal = new HashSet<>();
|
||||
retVal.add(concept);
|
||||
|
||||
fetchParents(concept, retVal);
|
||||
|
||||
ourLog.info("Fetched {} codes above code {} in {}ms", new Object[] { retVal.size(), theCode, stopwatch.elapsed(TimeUnit.MILLISECONDS) });
|
||||
ourLog.info("Fetched {} codes above code {} in {}ms", new Object[] {retVal.size(), theCode, stopwatch.getMillis()});
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
@ -179,27 +340,15 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
public List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode) {
|
||||
TermCodeSystem cs = getCodeSystem(theSystem);
|
||||
if (cs == null) {
|
||||
return findCodesAboveUsingBuiltInSystems(theSystem, theCode);
|
||||
return myVersionSpecificValidationSupport.findCodesAboveUsingBuiltInSystems(theSystem, theCode);
|
||||
}
|
||||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
|
||||
Set<TermConcept> codes = findCodesAbove(cs.getResource().getId(), csv.getResourceVersionId(), theCode);
|
||||
Set<TermConcept> codes = findCodesAbove(cs.getResource().getId(), csv.getPid(), theCode);
|
||||
ArrayList<VersionIndependentConcept> retVal = toVersionIndependentConcepts(theSystem, codes);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subclasses may override
|
||||
*
|
||||
* @param theSystem
|
||||
* The code system
|
||||
* @param theCode
|
||||
* The code
|
||||
*/
|
||||
protected List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
@Override
|
||||
public Set<TermConcept> findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {
|
||||
|
@ -210,12 +359,12 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
Set<TermConcept> retVal = new HashSet<TermConcept>();
|
||||
Set<TermConcept> retVal = new HashSet<>();
|
||||
retVal.add(concept);
|
||||
|
||||
fetchChildren(concept, retVal);
|
||||
|
||||
ourLog.info("Fetched {} codes below code {} in {}ms", new Object[] { retVal.size(), theCode, stopwatch.elapsed(TimeUnit.MILLISECONDS) });
|
||||
ourLog.info("Fetched {} codes below code {} in {}ms", new Object[] {retVal.size(), theCode, stopwatch.elapsed(TimeUnit.MILLISECONDS)});
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
@ -223,25 +372,12 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
public List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode) {
|
||||
TermCodeSystem cs = getCodeSystem(theSystem);
|
||||
if (cs == null) {
|
||||
return findCodesBelowUsingBuiltInSystems(theSystem, theCode);
|
||||
return myVersionSpecificValidationSupport.findCodesBelowUsingBuiltInSystems(theSystem, theCode);
|
||||
}
|
||||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
|
||||
Set<TermConcept> codes = findCodesBelow(cs.getResource().getId(), csv.getResourceVersionId(), theCode);
|
||||
ArrayList<VersionIndependentConcept> retVal = toVersionIndependentConcepts(theSystem, codes);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subclasses may override
|
||||
*
|
||||
* @param theSystem
|
||||
* The code system
|
||||
* @param theCode
|
||||
* The code
|
||||
*/
|
||||
protected List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
return Collections.emptyList();
|
||||
Set<TermConcept> codes = findCodesBelow(cs.getResource().getId(), csv.getPid(), theCode);
|
||||
return toVersionIndependentConcepts(theSystem, codes);
|
||||
}
|
||||
|
||||
private TermCodeSystemVersion findCurrentCodeSystemVersionForSystem(String theCodeSystem) {
|
||||
|
@ -249,13 +385,11 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
if (cs == null || cs.getCurrentVersion() == null) {
|
||||
return null;
|
||||
}
|
||||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
return csv;
|
||||
return cs.getCurrentVersion();
|
||||
}
|
||||
|
||||
private TermCodeSystem getCodeSystem(String theSystem) {
|
||||
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
|
||||
return cs;
|
||||
return myCodeSystemDao.findByCodeSystemUri(theSystem);
|
||||
}
|
||||
|
||||
private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {
|
||||
|
@ -314,7 +448,7 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
|
||||
if (codeCount > 0) {
|
||||
ourLog.info("Saved {} deferred concepts ({} codes remain and {} relationships remain) in {}ms ({}ms / code)",
|
||||
new Object[] { codeCount, myConceptsToSaveLater.size(), myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount) });
|
||||
new Object[] {codeCount, myConceptsToSaveLater.size(), myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount)});
|
||||
}
|
||||
|
||||
if (codeCount == 0) {
|
||||
|
@ -335,7 +469,7 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
|
||||
if (relCount > 0) {
|
||||
ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({}ms / code)",
|
||||
new Object[] { relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount) });
|
||||
new Object[] {relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount)});
|
||||
}
|
||||
|
||||
if ((myConceptsToSaveLater.size() + myConceptLinksToSaveLater.size()) == 0) {
|
||||
|
@ -413,7 +547,7 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
count++;
|
||||
}
|
||||
|
||||
ourLog.info("Indexed {} / {} concepts in {}ms - Avg {}ms / resource", new Object[] { count, concepts.getContent().size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(count) });
|
||||
ourLog.info("Indexed {} / {} concepts in {}ms - Avg {}ms / resource", new Object[] {count, concepts.getContent().size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(count)});
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -517,8 +651,8 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
myCodeSystemDao.save(codeSystem);
|
||||
} else {
|
||||
if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystemVersion.getResource().getId())) {
|
||||
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri,
|
||||
codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
String msg = myContext.getLocalizer().getMessage(HapiTerminologySvcImpl.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri,
|
||||
codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
}
|
||||
}
|
||||
|
@ -582,7 +716,7 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
}
|
||||
|
||||
private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, ArrayList<String> theConceptsStack,
|
||||
IdentityHashMap<TermConcept, Object> theAllConcepts) {
|
||||
IdentityHashMap<TermConcept, Object> theAllConcepts) {
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystem() != null, "CodesystemValue is null");
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystem() == theCodeSystem, "CodeSystems are not equal");
|
||||
ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "Codesystem contains a code with no code value");
|
||||
|
@ -618,15 +752,5 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
|
|||
ourForceSaveDeferredAlwaysForUnitTest = theForceSaveDeferredAlwaysForUnitTest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
|
||||
ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
|
||||
for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
|
||||
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
|
||||
myConceptDao.deleteByCodeSystemVersion(next.getPid());
|
||||
}
|
||||
myCodeSystemDao.delete(theCodeSystem.getPid());
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -1,19 +1,18 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
|
||||
import ca.uhn.fhir.jpa.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.hibernate.search.jpa.FullTextEntityManager;
|
||||
import org.hibernate.search.jpa.FullTextQuery;
|
||||
|
@ -21,21 +20,21 @@ import org.hibernate.search.query.dsl.BooleanJunction;
|
|||
import org.hibernate.search.query.dsl.QueryBuilder;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext;
|
||||
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
|
||||
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
|
||||
import org.hl7.fhir.r4.model.StructureDefinition;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.hl7.fhir.r4.model.ValueSet.*;
|
||||
import org.hl7.fhir.r4.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
|
||||
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
import java.util.*;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
@ -50,9 +49,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
@ -61,15 +60,21 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IValidationSupport, IHapiTerminologySvcR4 {
|
||||
public class HapiTerminologySvcR4 implements IHapiTerminologySvcR4 {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcR4.class);
|
||||
|
||||
@Autowired
|
||||
protected ITermCodeSystemDao myCodeSystemDao;
|
||||
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||
protected EntityManager myEntityManager;
|
||||
@Autowired
|
||||
@Qualifier("myCodeSystemDaoR4")
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTerminologySvc;
|
||||
@Autowired
|
||||
private FhirContext myContext;
|
||||
|
||||
private void addAllChildren(String theSystemString, ConceptDefinitionComponent theCode, List<VersionIndependentConcept> theListToPopulate) {
|
||||
if (isNotBlank(theCode.getCode())) {
|
||||
|
@ -128,7 +133,7 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
|
||||
ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();
|
||||
Set<String> addedCodes = new HashSet<String>();
|
||||
Set<String> addedCodes = new HashSet<>();
|
||||
boolean haveIncludeCriteria = false;
|
||||
|
||||
/*
|
||||
|
@ -138,7 +143,7 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
String nextCode = next.getCode();
|
||||
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
|
||||
haveIncludeCriteria = true;
|
||||
TermConcept code = super.findCode(system, nextCode);
|
||||
TermConcept code = myTerminologySvc.findCode(system, nextCode);
|
||||
if (code != null) {
|
||||
addedCodes.add(nextCode);
|
||||
ValueSetExpansionContainsComponent contains = retVal.addContains();
|
||||
|
@ -181,7 +186,7 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
addDisplayFilterInexact(qb, bool, nextFilter);
|
||||
}
|
||||
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == FilterOperator.ISA) {
|
||||
TermConcept code = super.findCode(system, nextFilter.getValue());
|
||||
TermConcept code = myTerminologySvc.findCode(system, nextFilter.getValue());
|
||||
if (code == null) {
|
||||
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
|
||||
}
|
||||
|
@ -212,7 +217,7 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
}
|
||||
|
||||
if (!haveIncludeCriteria) {
|
||||
List<TermConcept> allCodes = super.findCodes(system);
|
||||
List<TermConcept> allCodes = myTerminologySvc.findCodes(system);
|
||||
for (TermConcept nextConcept : allCodes) {
|
||||
addCodeIfNotAlreadyAdded(system, retVal, addedCodes, nextConcept);
|
||||
}
|
||||
|
@ -221,28 +226,28 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
|
||||
ValueSet source = new ValueSet();
|
||||
source.getCompose().addInclude().addValueSet(theValueSet);
|
||||
try {
|
||||
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
|
||||
|
||||
HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
|
||||
ValueSetExpansionOutcome outcome = worker.expand(source, null);
|
||||
for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
|
||||
retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
|
||||
}
|
||||
|
||||
return retVal;
|
||||
|
||||
} catch (BaseServerResponseException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
|
||||
}
|
||||
// @Override
|
||||
// public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
|
||||
// ValueSet source = new ValueSet();
|
||||
// source.getCompose().addInclude().addValueSet(theValueSet);
|
||||
// try {
|
||||
// ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
|
||||
//
|
||||
// HapiWorkerContext worker = new HapiWorkerContext(myContext, myValidationSupport);
|
||||
// ValueSetExpansionOutcome outcome = worker.expand(source, null);
|
||||
// for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
|
||||
// retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
|
||||
// }
|
||||
//
|
||||
// return retVal;
|
||||
//
|
||||
// } catch (BaseServerResponseException e) {
|
||||
// throw e;
|
||||
// } catch (Exception e) {
|
||||
// throw new InternalErrorException(e);
|
||||
// }
|
||||
//
|
||||
// }
|
||||
|
||||
@Override
|
||||
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
|
||||
|
@ -279,8 +284,8 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
}
|
||||
|
||||
@Override
|
||||
protected List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
|
||||
public List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
|
||||
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
|
||||
if (system != null) {
|
||||
findCodesAbove(system, theSystem, theCode, retVal);
|
||||
|
@ -304,8 +309,8 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
}
|
||||
|
||||
@Override
|
||||
protected List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
|
||||
public List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode) {
|
||||
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
|
||||
CodeSystem system = myValidationSupport.fetchCodeSystem(myContext, theSystem);
|
||||
if (system != null) {
|
||||
findCodesBelow(system, theSystem, theCode, retVal);
|
||||
|
@ -315,24 +320,15 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
|
||||
@Override
|
||||
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
|
||||
return super.supportsSystem(theSystem);
|
||||
return myTerminologySvc.supportsSystem(theSystem);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
|
||||
CodeSystem cs = new org.hl7.fhir.r4.model.CodeSystem();
|
||||
cs.setUrl(theSystem);
|
||||
cs.setContent(CodeSystemContentMode.NOTPRESENT);
|
||||
public void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets) {
|
||||
Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
|
||||
|
||||
DaoMethodOutcome createOutcome = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theSystem), theRequestDetails);
|
||||
IIdType csId = createOutcome.getId().toUnqualifiedVersionless();
|
||||
if (createOutcome.getCreated() != Boolean.TRUE) {
|
||||
CodeSystem existing = myCodeSystemResourceDao.read(csId, theRequestDetails);
|
||||
csId = myCodeSystemResourceDao.update(existing, null, false, true, theRequestDetails).getId();
|
||||
|
||||
ourLog.info("Created new version of CodeSystem, got ID: {}", csId.toUnqualified().getValue());
|
||||
}
|
||||
IIdType csId = myCodeSystemResourceDao.update(theCodeSystemResource, "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()), theRequestDetails).getId();
|
||||
|
||||
ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
|
||||
Long codeSystemResourcePid = resource.getId();
|
||||
|
@ -340,15 +336,14 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvc implements IVal
|
|||
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
|
||||
|
||||
theCodeSystemVersion.setResource(resource);
|
||||
theCodeSystemVersion.setResourceVersionId(resource.getVersion());
|
||||
super.storeNewCodeSystemVersion(codeSystemResourcePid, theSystem, theCodeSystemVersion);
|
||||
myTerminologySvc.storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemVersion);
|
||||
|
||||
}
|
||||
|
||||
@CoverageIgnore
|
||||
@Override
|
||||
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
|
||||
TermConcept code = super.findCode(theCodeSystem, theCode);
|
||||
TermConcept code = myTerminologySvc.findCode(theCodeSystem, theCode);
|
||||
if (code != null) {
|
||||
ConceptDefinitionComponent def = new ConceptDefinitionComponent();
|
||||
def.setCode(code.getCode());
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
|
@ -11,9 +17,9 @@ import java.util.List;
|
|||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
@ -22,35 +28,26 @@ import java.util.List;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
|
||||
public interface IHapiTerminologySvc {
|
||||
|
||||
void deleteCodeSystem(TermCodeSystem thePersCs);
|
||||
|
||||
ValueSet expandValueSet(ValueSet theValueSetToExpand);
|
||||
|
||||
List<VersionIndependentConcept> expandValueSet(String theValueSet);
|
||||
|
||||
TermConcept findCode(String theCodeSystem, String theCode);
|
||||
|
||||
List<TermConcept> findCodes(String theSystem);
|
||||
|
||||
Set<TermConcept> findCodesAbove(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode);
|
||||
|
||||
List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode);
|
||||
|
||||
Set<TermConcept> findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode);
|
||||
|
||||
List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode);
|
||||
|
||||
void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, TermCodeSystemVersion theCodeSytemVersion);
|
||||
|
||||
public boolean supportsSystem(String theCodeSystem);
|
||||
|
||||
List<VersionIndependentConcept> expandValueSet(String theValueSet);
|
||||
|
||||
List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode);
|
||||
|
||||
void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails);
|
||||
|
||||
List<TermConcept> findCodes(String theSystem);
|
||||
|
||||
void saveDeferred();
|
||||
|
||||
/**
|
||||
|
@ -59,4 +56,8 @@ public interface IHapiTerminologySvc {
|
|||
*/
|
||||
void setProcessDeferred(boolean theProcessDeferred);
|
||||
|
||||
void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, TermCodeSystemVersion theCodeSytemVersion);
|
||||
|
||||
boolean supportsSystem(String theCodeSystem);
|
||||
|
||||
}
|
||||
|
|
|
@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
|
||||
|
||||
public interface IHapiTerminologySvcDstu3 extends IHapiTerminologySvc, IValidationSupport {
|
||||
// nothing
|
||||
public interface IHapiTerminologySvcDstu3 extends IValidationSupport, IVersionSpecificValidationSupport {
|
||||
|
||||
}
|
||||
|
|
|
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term;
|
|||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
|
||||
|
||||
public interface IHapiTerminologySvcR4 extends IHapiTerminologySvc, IValidationSupport {
|
||||
// nothing
|
||||
public interface IHapiTerminologySvcR4 extends IValidationSupport {
|
||||
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,7 @@
package ca.uhn.fhir.jpa.term;

import org.apache.commons.csv.CSVRecord;

public interface IRecordHandler {
void accept(CSVRecord theRecord);
}
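
For illustration only (not part of this change): a minimal IRecordHandler sketch that collects a hypothetical "CODE" column from each row, assuming the CSV is parsed with its first record as the header, as the loader below does.

// Hypothetical handler - the "CODE" column name is an assumption
public class CodeCollectingHandler implements IRecordHandler {

private final java.util.List<String> myCodes = new java.util.ArrayList<>();

@Override
public void accept(CSVRecord theRecord) {
// Invoked once per CSV row; columns are addressed by header name
if (theRecord.isSet("CODE")) {
myCodes.add(theRecord.get("CODE"));
}
}

public java.util.List<String> getCodes() {
return myCodes;
}
}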
@@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;

import java.util.List;

public interface IVersionSpecificValidationSupport {

List<VersionIndependentConcept> findCodesAboveUsingBuiltInSystems(String theSystem, String theCode);

List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode);

void storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets);

}
@ -1,5 +1,40 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler;
|
||||
import ca.uhn.fhir.jpa.term.loinc.LoincHandler;
|
||||
import ca.uhn.fhir.jpa.term.loinc.LoincHierarchyHandler;
|
||||
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
|
||||
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
|
||||
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
|
||||
import ca.uhn.fhir.jpa.util.Counter;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.csv.CSVFormat;
|
||||
import org.apache.commons.csv.CSVParser;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.apache.commons.csv.QuoteMode;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.apache.commons.lang3.ObjectUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
|
@ -11,9 +46,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
@ -21,47 +56,29 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
import org.apache.commons.csv.*;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Charsets;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.*;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
|
||||
import ca.uhn.fhir.jpa.util.Counter;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
|
||||
public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
||||
private static final int LOG_INCREMENT = 100000;
|
||||
|
||||
public static final String LOINC_FILE = "loinc.csv";
|
||||
|
||||
public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
|
||||
|
||||
public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
|
||||
public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
|
||||
public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
|
||||
public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
|
||||
public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
|
||||
private static final int LOG_INCREMENT = 100000;
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTermSvc;
|
||||
@Autowired(required = false)
|
||||
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
|
||||
@Autowired(required = false)
|
||||
private IHapiTerminologySvcR4 myTermSvcR4;
|
||||
|
||||
private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {
|
||||
|
||||
|
||||
theChain.add(theConcept.getCode());
|
||||
for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext();) {
|
||||
for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext(); ) {
|
||||
TermConceptParentChildLink next = childIter.next();
|
||||
TermConcept nextChild = next.getChild();
|
||||
if (theChain.contains(nextChild.getCode())) {
|
||||
|
@ -82,7 +99,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
|||
ourLog.info(b.toString(), theConcept.getCode());
|
||||
childIter.remove();
|
||||
nextChild.getParents().remove(next);
|
||||
|
||||
|
||||
} else {
|
||||
dropCircularRefs(nextChild, theChain, theCode2concept, theCircularCounter);
|
||||
}
|
||||
|
@ -91,71 +108,21 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
|||
|
||||
}
|
||||
|
||||
private void extractFiles(List<byte[]> theZipBytes, List<String> theExpectedFilenameFragments) {
|
||||
Set<String> foundFragments = new HashSet<String>();
|
||||
|
||||
for (byte[] nextZipBytes : theZipBytes) {
|
||||
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(nextZipBytes)));
|
||||
try {
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null;) {
|
||||
for (String next : theExpectedFilenameFragments) {
|
||||
if (nextEntry.getName().contains(next)) {
|
||||
foundFragments.add(next);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(zis);
|
||||
}
|
||||
}
|
||||
|
||||
for (String next : theExpectedFilenameFragments) {
|
||||
if (!foundFragments.contains(next)) {
|
||||
throw new InvalidRequestException("Invalid input zip file, expected zip to contain the following name fragments: " + theExpectedFilenameFragments + " but found: " + foundFragments);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public String firstNonBlank(String... theStrings) {
|
||||
String retVal = "";
|
||||
for (String nextString : theStrings) {
|
||||
if (isNotBlank(nextString)) {
|
||||
retVal = nextString;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
|
||||
TermConcept concept = id2concept.get(id);
|
||||
if (concept == null) {
|
||||
concept = new TermConcept();
|
||||
id2concept.put(id, concept);
|
||||
concept.setCodeSystem(codeSystemVersion);
|
||||
}
|
||||
return concept;
|
||||
}
|
||||
|
||||
private void iterateOverZipFile(List<byte[]> theZipBytes, String fileNamePart, IRecordHandler handler, char theDelimiter, QuoteMode theQuoteMode) {
|
||||
boolean found = false;
|
||||
|
||||
for (byte[] nextZipBytes : theZipBytes) {
|
||||
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(nextZipBytes)));
|
||||
try {
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null;) {
|
||||
ZippedFileInputStream inputStream = new ZippedFileInputStream(zis);
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
|
||||
|
||||
String nextFilename = nextEntry.getName();
|
||||
if (nextFilename.contains(fileNamePart)) {
|
||||
ourLog.info("Processing file {}", nextFilename);
|
||||
found = true;
|
||||
|
||||
Reader reader = null;
|
||||
CSVParser parsed = null;
|
||||
Reader reader;
|
||||
CSVParser parsed;
|
||||
try {
|
||||
reader = new InputStreamReader(new BOMInputStream(zis), Charsets.UTF_8);
|
||||
CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader();
|
||||
|
@ -197,9 +164,11 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
|||
|
||||
@Override
|
||||
public UploadStatistics loadLoinc(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
|
||||
List<String> expectedFilenameFragments = Arrays.asList(LOINC_FILE, LOINC_HIERARCHY_FILE);
|
||||
List<String> expectedFilenameFragments = Arrays.asList(
|
||||
LOINC_FILE,
|
||||
LOINC_HIERARCHY_FILE);
|
||||
|
||||
extractFiles(theZipBytes, expectedFilenameFragments);
|
||||
verifyMandatoryFilesExist(theZipBytes, expectedFilenameFragments);
|
||||
|
||||
ourLog.info("Beginning LOINC processing");
|
||||
|
||||
|
@ -210,7 +179,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
|||
public UploadStatistics loadSnomedCt(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
|
||||
List<String> expectedFilenameFragments = Arrays.asList(SCT_FILE_DESCRIPTION, SCT_FILE_RELATIONSHIP, SCT_FILE_CONCEPT);
|
||||
|
||||
extractFiles(theZipBytes, expectedFilenameFragments);
|
||||
verifyMandatoryFilesExist(theZipBytes, expectedFilenameFragments);
|
||||
|
||||
ourLog.info("Beginning SNOMED CT processing");
|
||||
|
||||
|
@@ -219,23 +188,41 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {

UploadStatistics processLoincFiles(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
final Map<String, TermConcept> code2concept = new HashMap<>();
final List<ValueSet> valueSets = new ArrayList<>();

IRecordHandler handler = new LoincHandler(codeSystemVersion, code2concept);
CodeSystem loincCs;
try {
String loincCsString = IOUtils.toString(HapiTerminologySvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8);
loincCs = FhirContext.forR4().newXmlParser().parseResource(CodeSystem.class, loincCsString);
} catch (IOException e) {
throw new InternalErrorException("Failed to load loinc.xml", e);
}

Set<String> propertyNames = new HashSet<>();
for (CodeSystem.PropertyComponent nextProperty : loincCs.getProperty()) {
if (isNotBlank(nextProperty.getCode())) {
propertyNames.add(nextProperty.getCode());
}
}

IRecordHandler handler;

// Loinc Codes
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNames);
iterateOverZipFile(theZipBytes, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);

// Loinc Hierarchy
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theZipBytes, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);

// Answer lists (ValueSets of potential answers/values for loinc "questions")
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, propertyNames, valueSets);
iterateOverZipFile(theZipBytes, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);

theZipBytes.clear();

for (Iterator<Entry<String, TermConcept>> iter = code2concept.entrySet().iterator(); iter.hasNext();) {
Entry<String, TermConcept> next = iter.next();
// if (isBlank(next.getKey())) {
// ourLog.info("Removing concept with blankc code[{}] and display [{}", next.getValue().getCode(), next.getValue().getDisplay());
// iter.remove();
// continue;
// }

for (Entry<String, TermConcept> next : code2concept.entrySet()) {
TermConcept nextConcept = next.getValue();
if (nextConcept.getParents().isEmpty()) {
codeSystemVersion.getConcepts().add(nextConcept);

@@ -244,18 +231,11 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {

ourLog.info("Have {} total concepts, {} root concepts", code2concept.size(), codeSystemVersion.getConcepts().size());

String url = LOINC_URL;
storeCodeSystem(theRequestDetails, codeSystemVersion, url);
storeCodeSystem(theRequestDetails, codeSystemVersion, loincCs, valueSets);

return new UploadStatistics(code2concept.size());
}
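
For illustration only (not part of this change): a rough sketch of feeding a LOINC distribution zip to the loader, assuming a wired-in IHapiTerminologyLoaderSvc and a local file path. The mandatory-file check above requires loinc.csv and MULTI-AXIAL_HIERARCHY.CSV; AnswerList_Beta_1.csv is processed as well when present.

// Hypothetical caller - the file location and request details are assumptions
public UploadStatistics uploadLoincDistribution(IHapiTerminologyLoaderSvc theLoaderSvc, RequestDetails theRequestDetails) throws java.io.IOException {
// Read the whole distribution zip into memory, since loadLoinc() expects raw zip bytes
byte[] zipBytes = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get("/tmp/loinc-distribution.zip"));
return theLoaderSvc.loadLoinc(java.util.Collections.singletonList(zipBytes), theRequestDetails);
}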

private void storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion codeSystemVersion, String url) {
myTermSvc.setProcessDeferred(false);
myTermSvc.storeNewCodeSystemVersion(url, codeSystemVersion, theRequestDetails);
myTermSvc.setProcessDeferred(true);
}
|
||||
|
||||
UploadStatistics processSnomedCtFiles(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
|
||||
final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
|
||||
final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
|
||||
|
@ -284,244 +264,99 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
|
|||
iter.remove();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size());
|
||||
|
||||
Counter circularCounter = new Counter();
|
||||
for (TermConcept next : rootConcepts.values()) {
|
||||
long count = circularCounter.getThenAdd();
|
||||
float pct = ((float)count / rootConcepts.size()) * 100.0f;
|
||||
float pct = ((float) count / rootConcepts.size()) * 100.0f;
|
||||
ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count, rootConcepts.size(), pct);
|
||||
dropCircularRefs(next, new ArrayList<String>(), code2concept, circularCounter);
|
||||
}
|
||||
|
||||
codeSystemVersion.getConcepts().addAll(rootConcepts.values());
|
||||
String url = SCT_URL;
|
||||
storeCodeSystem(theRequestDetails, codeSystemVersion, url);
|
||||
|
||||
CodeSystem cs = new org.hl7.fhir.r4.model.CodeSystem();
|
||||
cs.setUrl(SCT_URL);
|
||||
cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
|
||||
storeCodeSystem(theRequestDetails, codeSystemVersion, cs, null);
|
||||
|
||||
return new UploadStatistics(code2concept.size());
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setTermSvcDstu3ForUnitTest(IHapiTerminologySvcDstu3 theTermSvcDstu3) {
|
||||
myTermSvcDstu3 = theTermSvcDstu3;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) {
|
||||
myTermSvc = theTermSvc;
|
||||
}
|
||||
|
||||
private interface IRecordHandler {
|
||||
void accept(CSVRecord theRecord);
|
||||
private void storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystem, List<ValueSet> theValueSets) {
|
||||
Validate.isTrue(theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT);
|
||||
|
||||
List<ValueSet> valueSets = ObjectUtils.defaultIfNull(theValueSets, Collections.<ValueSet>emptyList());
|
||||
|
||||
myTermSvc.setProcessDeferred(false);
|
||||
if (myTermSvcDstu3 != null) {
|
||||
myTermSvcDstu3.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets);
|
||||
} else {
|
||||
myTermSvcR4.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets);
|
||||
}
|
||||
myTermSvc.setProcessDeferred(true);
|
||||
}
|
||||
|
||||
public class LoincHandler implements IRecordHandler {
|
||||
private void verifyMandatoryFilesExist(List<byte[]> theZipBytes, List<String> theExpectedFilenameFragments) {
|
||||
Set<String> foundFragments = new HashSet<>();
|
||||
|
||||
private final Map<String, TermConcept> myCode2Concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
|
||||
public LoincHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myCode2Concept = theCode2concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String code = theRecord.get("LOINC_NUM");
|
||||
if (isNotBlank(code)) {
|
||||
String longCommonName = theRecord.get("LONG_COMMON_NAME");
|
||||
String shortName = theRecord.get("SHORTNAME");
|
||||
String consumerName = theRecord.get("CONSUMER_NAME");
|
||||
String display = firstNonBlank(longCommonName, shortName, consumerName);
|
||||
|
||||
TermConcept concept = new TermConcept(myCodeSystemVersion, code);
|
||||
concept.setDisplay(display);
|
||||
|
||||
Validate.isTrue(!myCode2Concept.containsKey(code));
|
||||
myCode2Concept.put(code, concept);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class LoincHierarchyHandler implements IRecordHandler {
|
||||
|
||||
private Map<String, TermConcept> myCode2Concept;
|
||||
private TermCodeSystemVersion myCodeSystemVersion;
|
||||
|
||||
public LoincHierarchyHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myCode2Concept = theCode2concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String parentCode = theRecord.get("IMMEDIATE_PARENT");
|
||||
String childCode = theRecord.get("CODE");
|
||||
String childCodeText = theRecord.get("CODE_TEXT");
|
||||
|
||||
if (isNotBlank(parentCode) && isNotBlank(childCode)) {
|
||||
TermConcept parent = getOrCreate(parentCode, "(unknown)");
|
||||
TermConcept child = getOrCreate(childCode, childCodeText);
|
||||
|
||||
parent.addChild(child, RelationshipTypeEnum.ISA);
|
||||
}
|
||||
}
|
||||
|
||||
private TermConcept getOrCreate(String theCode, String theDisplay) {
|
||||
TermConcept retVal = myCode2Concept.get(theCode);
|
||||
if (retVal == null) {
|
||||
retVal = new TermConcept();
|
||||
retVal.setCodeSystem(myCodeSystemVersion);
|
||||
retVal.setCode(theCode);
|
||||
retVal.setDisplay(theDisplay);
|
||||
myCode2Concept.put(theCode, retVal);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private final class SctHandlerConcept implements IRecordHandler {
|
||||
|
||||
private Set<String> myValidConceptIds;
|
||||
private Map<String, String> myConceptIdToMostRecentDate = new HashMap<String, String>();
|
||||
|
||||
public SctHandlerConcept(Set<String> theValidConceptIds) {
|
||||
myValidConceptIds = theValidConceptIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String id = theRecord.get("id");
|
||||
String date = theRecord.get("effectiveTime");
|
||||
|
||||
if (!myConceptIdToMostRecentDate.containsKey(id) || myConceptIdToMostRecentDate.get(id).compareTo(date) < 0) {
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
if (active) {
|
||||
myValidConceptIds.add(id);
|
||||
} else {
|
||||
myValidConceptIds.remove(id);
|
||||
}
|
||||
myConceptIdToMostRecentDate.put(id, date);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private final class SctHandlerDescription implements IRecordHandler {
|
||||
private final Map<String, TermConcept> myCode2concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Map<String, TermConcept> myId2concept;
|
||||
private Set<String> myValidConceptIds;
|
||||
|
||||
private SctHandlerDescription(Set<String> theValidConceptIds, Map<String, TermConcept> theCode2concept, Map<String, TermConcept> theId2concept, TermCodeSystemVersion theCodeSystemVersion) {
|
||||
myCode2concept = theCode2concept;
|
||||
myId2concept = theId2concept;
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myValidConceptIds = theValidConceptIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String id = theRecord.get("id");
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
if (!active) {
|
||||
return;
|
||||
}
|
||||
String conceptId = theRecord.get("conceptId");
|
||||
if (!myValidConceptIds.contains(conceptId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String term = theRecord.get("term");
|
||||
|
||||
TermConcept concept = getOrCreateConcept(myCodeSystemVersion, myId2concept, id);
|
||||
concept.setCode(conceptId);
|
||||
concept.setDisplay(term);
|
||||
myCode2concept.put(conceptId, concept);
|
||||
}
|
||||
}
|
||||
|
||||
private final class SctHandlerRelationship implements IRecordHandler {
|
||||
private final Map<String, TermConcept> myCode2concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Map<String, TermConcept> myRootConcepts;
|
||||
|
||||
private SctHandlerRelationship(TermCodeSystemVersion theCodeSystemVersion, HashMap<String, TermConcept> theRootConcepts, Map<String, TermConcept> theCode2concept) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myRootConcepts = theRootConcepts;
|
||||
myCode2concept = theCode2concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
Set<String> ignoredTypes = new HashSet<String>();
|
||||
ignoredTypes.add("Method (attribute)");
|
||||
ignoredTypes.add("Direct device (attribute)");
|
||||
ignoredTypes.add("Has focus (attribute)");
|
||||
ignoredTypes.add("Access instrument");
|
||||
ignoredTypes.add("Procedure site (attribute)");
|
||||
ignoredTypes.add("Causative agent (attribute)");
|
||||
ignoredTypes.add("Course (attribute)");
|
||||
ignoredTypes.add("Finding site (attribute)");
|
||||
ignoredTypes.add("Has definitional manifestation (attribute)");
|
||||
|
||||
String sourceId = theRecord.get("sourceId");
|
||||
String destinationId = theRecord.get("destinationId");
|
||||
String typeId = theRecord.get("typeId");
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
|
||||
TermConcept typeConcept = myCode2concept.get(typeId);
|
||||
TermConcept sourceConcept = myCode2concept.get(sourceId);
|
||||
TermConcept targetConcept = myCode2concept.get(destinationId);
|
||||
if (sourceConcept != null && targetConcept != null && typeConcept != null) {
|
||||
if (typeConcept.getDisplay().equals("Is a (attribute)")) {
|
||||
RelationshipTypeEnum relationshipType = RelationshipTypeEnum.ISA;
|
||||
if (!sourceId.equals(destinationId)) {
|
||||
if (active) {
|
||||
TermConceptParentChildLink link = new TermConceptParentChildLink();
|
||||
link.setChild(sourceConcept);
|
||||
link.setParent(targetConcept);
|
||||
link.setRelationshipType(relationshipType);
|
||||
link.setCodeSystem(myCodeSystemVersion);
|
||||
|
||||
targetConcept.addChild(sourceConcept, relationshipType);
|
||||
} else {
|
||||
// not active, so we're removing any existing links
|
||||
for (TermConceptParentChildLink next : new ArrayList<TermConceptParentChildLink>(targetConcept.getChildren())) {
|
||||
if (next.getRelationshipType() == relationshipType) {
|
||||
if (next.getChild().getCode().equals(sourceConcept.getCode())) {
|
||||
next.getParent().getChildren().remove(next);
|
||||
next.getChild().getParents().remove(next);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (byte[] nextZipBytes : theZipBytes) {
|
||||
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(nextZipBytes)));
|
||||
try {
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
|
||||
for (String next : theExpectedFilenameFragments) {
|
||||
if (nextEntry.getName().contains(next)) {
|
||||
foundFragments.add(next);
|
||||
}
|
||||
}
|
||||
} else if (ignoredTypes.contains(typeConcept.getDisplay())) {
|
||||
// ignore
|
||||
} else {
|
||||
// ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(zis);
|
||||
}
|
||||
}
|
||||
|
||||
for (String next : theExpectedFilenameFragments) {
|
||||
if (!foundFragments.contains(next)) {
|
||||
throw new InvalidRequestException("Invalid input zip file, expected zip to contain the following name fragments: " + theExpectedFilenameFragments + " but found: " + foundFragments);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class ZippedFileInputStream extends InputStream {
|
||||
|
||||
private ZipInputStream is;
|
||||
|
||||
public ZippedFileInputStream(ZipInputStream is) {
|
||||
this.is = is;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
is.closeEntry();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
return is.read();
|
||||
public static String firstNonBlank(String... theStrings) {
|
||||
String retVal = "";
|
||||
for (String nextString : theStrings) {
|
||||
if (isNotBlank(nextString)) {
|
||||
retVal = nextString;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
|
||||
TermConcept concept = id2concept.get(id);
|
||||
if (concept == null) {
|
||||
concept = new TermConcept();
|
||||
id2concept.put(id, concept);
|
||||
concept.setCodeSystem(codeSystemVersion);
|
||||
}
|
||||
return concept;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,95 @@
|
|||
package ca.uhn.fhir.jpa.term.loinc;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.trim;
|
||||
|
||||
public class LoincAnswerListHandler implements IRecordHandler {
|
||||
|
||||
private final Map<String, TermConcept> myCode2Concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Set<String> myPropertyNames;
|
||||
private final List<ValueSet> myValueSets;
|
||||
private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
|
||||
|
||||
public LoincAnswerListHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, Set<String> thePropertyNames, List<ValueSet> theValueSets) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myCode2Concept = theCode2concept;
|
||||
myPropertyNames = thePropertyNames;
|
||||
myValueSets = theValueSets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
|
||||
// this is the code for the list (will repeat)
|
||||
String answerListId = trim(theRecord.get("AnswerListId"));
|
||||
String answerListName = trim(theRecord.get("AnswerListName"));
|
||||
String answerListOid = trim(theRecord.get("AnswerListOID"));
|
||||
String externallyDefined = trim(theRecord.get("ExtDefinedYN"));
|
||||
String externallyDefinedCs = trim(theRecord.get("ExtDefinedAnswerListCodeSystem"));
|
||||
String externallyDefinedLink = trim(theRecord.get("ExtDefinedAnswerListLink"));
|
||||
// this is the code for the actual answer (will not repeat)
|
||||
String answerString = trim(theRecord.get("AnswerStringId"));
|
||||
String sequenceNumber = trim(theRecord.get("SequenceNumber"));
|
||||
String displayText = trim(theRecord.get("DisplayText"));
|
||||
String extCodeId = trim(theRecord.get("ExtCodeId"));
|
||||
String extCodeDisplayName = trim(theRecord.get("ExtCodeDisplayName"));
|
||||
String extCodeSystem = trim(theRecord.get("ExtCodeSystem"));
|
||||
String extCodeSystemVersion = trim(theRecord.get("ExtCodeSystemVersion"));
|
||||
|
||||
// Answer list code
|
||||
if (!myCode2Concept.containsKey(answerListId)) {
|
||||
TermConcept concept = new TermConcept(myCodeSystemVersion, answerListId);
|
||||
concept.setDisplay(answerListName);
|
||||
myCode2Concept.put(answerListId, concept);
|
||||
}
|
||||
|
||||
// Answer code
|
||||
if (!myCode2Concept.containsKey(answerString)) {
|
||||
TermConcept concept = new TermConcept(myCodeSystemVersion, answerString);
|
||||
concept.setDisplay(displayText);
|
||||
if (isNotBlank(sequenceNumber) && sequenceNumber.matches("^[0-9]+$")) {
|
||||
concept.setSequence(Integer.parseInt(sequenceNumber));
|
||||
}
|
||||
myCode2Concept.put(answerString, concept);
|
||||
}
|
||||
|
||||
// Answer list ValueSet
|
||||
ValueSet vs;
|
||||
if (!myIdToValueSet.containsKey(answerListId)) {
|
||||
vs = new ValueSet();
|
||||
vs.setUrl("urn:oid:" + answerListOid);
|
||||
vs.addIdentifier()
|
||||
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URL)
|
||||
.setValue(answerListId);
|
||||
vs.setId(answerListId);
|
||||
vs.setName(answerListName);
|
||||
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
|
||||
myIdToValueSet.put(answerListId, vs);
|
||||
myValueSets.add(vs);
|
||||
} else {
|
||||
vs = myIdToValueSet.get(answerListId);
|
||||
}
|
||||
vs
|
||||
.getCompose()
|
||||
.getIncludeFirstRep()
|
||||
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URL)
|
||||
.addConcept()
|
||||
.setCode(answerString)
|
||||
.setDisplay(displayText);
|
||||
}
|
||||
|
||||
}
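The accept() method above is called once per data row of the LOINC answer list CSV; the code that parses the file and drives the handler is not part of this hunk. The following is only a rough sketch of how such a handler could be exercised on its own, assuming a local file named AnswerList.csv with the column headings used above; the file name and the standalone main() wrapper are illustrative, not part of this change.

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ValueSet;

import java.io.FileReader;
import java.io.Reader;
import java.util.*;

public class AnswerListLoadSketch {
	public static void main(String[] args) throws Exception {
		Map<String, TermConcept> code2concept = new HashMap<>();
		List<ValueSet> valueSets = new ArrayList<>();
		IRecordHandler handler = new LoincAnswerListHandler(
			new TermCodeSystemVersion(), code2concept, new HashSet<String>(), valueSets);

		// "AnswerList.csv" is a placeholder path; the real loader reads this entry out of the uploaded ZIP
		try (Reader reader = new FileReader("AnswerList.csv")) {
			for (CSVRecord record : CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(reader)) {
				handler.accept(record);
			}
		}

		// code2concept now holds one TermConcept per answer list (LL-) and answer (LA-) code,
		// and valueSets holds one ValueSet per answer list
	}
}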
|
|
@ -0,0 +1,55 @@
|
|||
package ca.uhn.fhir.jpa.term.loinc;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.trim;
|
||||
|
||||
public class LoincHandler implements IRecordHandler {
|
||||
|
||||
private final Map<String, TermConcept> myCode2Concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Set<String> myPropertyNames;
|
||||
|
||||
public LoincHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, Set<String> thePropertyNames) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myCode2Concept = theCode2concept;
|
||||
myPropertyNames = thePropertyNames;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String code = trim(theRecord.get("LOINC_NUM"));
|
||||
if (isNotBlank(code)) {
|
||||
String longCommonName = trim(theRecord.get("LONG_COMMON_NAME"));
|
||||
String shortName = trim(theRecord.get("SHORTNAME"));
|
||||
String consumerName = trim(theRecord.get("CONSUMER_NAME"));
|
||||
String display = TerminologyLoaderSvc.firstNonBlank(longCommonName, shortName, consumerName);
|
||||
|
||||
TermConcept concept = new TermConcept(myCodeSystemVersion, code);
|
||||
concept.setDisplay(display);
|
||||
|
||||
for (String nextPropertyName : myPropertyNames) {
|
||||
if (!theRecord.toMap().containsKey(nextPropertyName)) {
|
||||
continue;
|
||||
}
|
||||
String nextPropertyValue = theRecord.get(nextPropertyName);
|
||||
if (isNotBlank(nextPropertyValue)) {
|
||||
concept.addProperty(nextPropertyName, nextPropertyValue);
|
||||
}
|
||||
}
|
||||
|
||||
Validate.isTrue(!myCode2Concept.containsKey(code));
|
||||
myCode2Concept.put(code, concept);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
package ca.uhn.fhir.jpa.term.loinc;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.trim;
|
||||
|
||||
public class LoincHierarchyHandler implements IRecordHandler {
|
||||
|
||||
private Map<String, TermConcept> myCode2Concept;
|
||||
private TermCodeSystemVersion myCodeSystemVersion;
|
||||
|
||||
public LoincHierarchyHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myCode2Concept = theCode2concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String parentCode = trim(theRecord.get("IMMEDIATE_PARENT"));
|
||||
String childCode = trim(theRecord.get("CODE"));
|
||||
String childCodeText = trim(theRecord.get("CODE_TEXT"));
|
||||
|
||||
if (isNotBlank(parentCode) && isNotBlank(childCode)) {
|
||||
TermConcept parent = getOrCreate(parentCode, "(unknown)");
|
||||
TermConcept child = getOrCreate(childCode, childCodeText);
|
||||
|
||||
parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
|
||||
}
|
||||
}
|
||||
|
||||
private TermConcept getOrCreate(String theCode, String theDisplay) {
|
||||
TermConcept retVal = myCode2Concept.get(theCode);
|
||||
if (retVal == null) {
|
||||
retVal = new TermConcept();
|
||||
retVal.setCodeSystem(myCodeSystemVersion);
|
||||
retVal.setCode(theCode);
|
||||
retVal.setDisplay(theDisplay);
|
||||
myCode2Concept.put(theCode, retVal);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
package ca.uhn.fhir.jpa.term.snomedct;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public final class SctHandlerConcept implements IRecordHandler {
|
||||
|
||||
private Set<String> myValidConceptIds;
|
||||
private Map<String, String> myConceptIdToMostRecentDate = new HashMap<String, String>();
|
||||
|
||||
public SctHandlerConcept(Set<String> theValidConceptIds) {
|
||||
myValidConceptIds = theValidConceptIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String id = theRecord.get("id");
|
||||
String date = theRecord.get("effectiveTime");
|
||||
|
||||
if (!myConceptIdToMostRecentDate.containsKey(id) || myConceptIdToMostRecentDate.get(id).compareTo(date) < 0) {
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
if (active) {
|
||||
myValidConceptIds.add(id);
|
||||
} else {
|
||||
myValidConceptIds.remove(id);
|
||||
}
|
||||
myConceptIdToMostRecentDate.put(id, date);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
package ca.uhn.fhir.jpa.term.snomedct;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public final class SctHandlerDescription implements IRecordHandler {
|
||||
private final Map<String, TermConcept> myCode2concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Map<String, TermConcept> myId2concept;
|
||||
private Set<String> myValidConceptIds;
|
||||
|
||||
public SctHandlerDescription(Set<String> theValidConceptIds, Map<String, TermConcept> theCode2concept, Map<String, TermConcept> theId2concept, TermCodeSystemVersion theCodeSystemVersion) {
|
||||
myCode2concept = theCode2concept;
|
||||
myId2concept = theId2concept;
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myValidConceptIds = theValidConceptIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
String id = theRecord.get("id");
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
if (!active) {
|
||||
return;
|
||||
}
|
||||
String conceptId = theRecord.get("conceptId");
|
||||
if (!myValidConceptIds.contains(conceptId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String term = theRecord.get("term");
|
||||
|
||||
TermConcept concept = TerminologyLoaderSvc.getOrCreateConcept(myCodeSystemVersion, myId2concept, id);
|
||||
concept.setCode(conceptId);
|
||||
concept.setDisplay(term);
|
||||
myCode2concept.put(conceptId, concept);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
package ca.uhn.fhir.jpa.term.snomedct;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
public final class SctHandlerRelationship implements IRecordHandler {
|
||||
private final Map<String, TermConcept> myCode2concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
private final Map<String, TermConcept> myRootConcepts;
|
||||
|
||||
public SctHandlerRelationship(TermCodeSystemVersion theCodeSystemVersion, HashMap<String, TermConcept> theRootConcepts, Map<String, TermConcept> theCode2concept) {
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
myRootConcepts = theRootConcepts;
|
||||
myCode2concept = theCode2concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(CSVRecord theRecord) {
|
||||
Set<String> ignoredTypes = new HashSet<String>();
|
||||
ignoredTypes.add("Method (attribute)");
|
||||
ignoredTypes.add("Direct device (attribute)");
|
||||
ignoredTypes.add("Has focus (attribute)");
|
||||
ignoredTypes.add("Access instrument");
|
||||
ignoredTypes.add("Procedure site (attribute)");
|
||||
ignoredTypes.add("Causative agent (attribute)");
|
||||
ignoredTypes.add("Course (attribute)");
|
||||
ignoredTypes.add("Finding site (attribute)");
|
||||
ignoredTypes.add("Has definitional manifestation (attribute)");
|
||||
|
||||
String sourceId = theRecord.get("sourceId");
|
||||
String destinationId = theRecord.get("destinationId");
|
||||
String typeId = theRecord.get("typeId");
|
||||
boolean active = "1".equals(theRecord.get("active"));
|
||||
|
||||
TermConcept typeConcept = myCode2concept.get(typeId);
|
||||
TermConcept sourceConcept = myCode2concept.get(sourceId);
|
||||
TermConcept targetConcept = myCode2concept.get(destinationId);
|
||||
if (sourceConcept != null && targetConcept != null && typeConcept != null) {
|
||||
if (typeConcept.getDisplay().equals("Is a (attribute)")) {
|
||||
TermConceptParentChildLink.RelationshipTypeEnum relationshipType = TermConceptParentChildLink.RelationshipTypeEnum.ISA;
|
||||
if (!sourceId.equals(destinationId)) {
|
||||
if (active) {
|
||||
TermConceptParentChildLink link = new TermConceptParentChildLink();
|
||||
link.setChild(sourceConcept);
|
||||
link.setParent(targetConcept);
|
||||
link.setRelationshipType(relationshipType);
|
||||
link.setCodeSystem(myCodeSystemVersion);
|
||||
|
||||
targetConcept.addChild(sourceConcept, relationshipType);
|
||||
} else {
|
||||
// not active, so we're removing any existing links
|
||||
for (TermConceptParentChildLink next : new ArrayList<TermConceptParentChildLink>(targetConcept.getChildren())) {
|
||||
if (next.getRelationshipType() == relationshipType) {
|
||||
if (next.getChild().getCode().equals(sourceConcept.getCode())) {
|
||||
next.getParent().getChildren().remove(next);
|
||||
next.getChild().getParents().remove(next);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (ignoredTypes.contains(typeConcept.getDisplay())) {
|
||||
// ignore
|
||||
} else {
|
||||
// ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,442 @@
|
|||
<!--
|
||||
LOINC is a well maintained, version independent code system
|
||||
|
||||
This CodeSystem resource describes 'LOINC' independent of
|
||||
any particular version. There are notes about changes for
|
||||
version specific LOINC code system resources.
|
||||
|
||||
Note that the following set of codes constitute the
|
||||
LOINC code systems:
|
||||
- the main LOINC codes
|
||||
- the LOINC answer codes (LA-) and the LOINC answer list codes (LL-)
|
||||
- the Part codes in the Multiaxial Hierarchy
|
||||
- the Part codes for the properties.
|
||||
Note: there are license restrictions on the use of LOINC Part codes
|
||||
|
||||
Servers may generate variants of this for the LOINC version(s) and features they support.
|
||||
-->
|
||||
<!--
|
||||
Version History of this specification
|
||||
0.1 | published 2016 11 18
|
||||
0.2 | published 2017 03 10 (fixed rad properties, removed list-specific LA properties, typos)
|
||||
0.3 | published 2017 05 09 (removed CHNGE_TYP based on LOINC Committee recommendation, changed filter types from code to Coding, which allows use of the LP codes or the Part names)
|
||||
0.4 | published 2018 02 09 (fixed multiaxial hierarchy relationship, added clarifying statement about English as the language for filters)
|
||||
-->
|
||||
<CodeSystem xmlns="http://hl7.org/fhir">
|
||||
<id value="loinc"/>
|
||||
|
||||
<!-- This url is unchanged for all versions of LOINC. There
|
||||
can only be one correct Code System resource for each value of the
|
||||
version attribute (at least, only one per server) -->
|
||||
<url value="http://loinc.org"/>
|
||||
|
||||
<!-- the HL7 v3 OID assigned to LOINC -->
|
||||
<identifier>
|
||||
<system value="urn:ietf:rfc:3986"/>
|
||||
<value value="urn:oid:2.16.840.1.113883.6.1"/>
|
||||
</identifier>
|
||||
|
||||
<!--
|
||||
// if a version is specified:
|
||||
<version value="2.59"/>
|
||||
-->
|
||||
|
||||
<!-- if a specific version is specified, the name should carry this information (e.g. LOINC_259), as should the title -->
|
||||
<name value="LOINC"/>
|
||||
<title value="LOINC Code System"/>
|
||||
<status value="active"/>
|
||||
<experimental value="false"/>
|
||||
|
||||
<publisher value="Regenstrief Institute, Inc."/>
|
||||
<contact>
|
||||
<telecom>
|
||||
<value value="http://loinc.org"/>
|
||||
</telecom>
|
||||
</contact>
|
||||
|
||||
<!--
|
||||
<date value=[date for this version]"/>
|
||||
-->
|
||||
<description value="LOINC is a freely available international standard for tests, measurements, and observations"/>
|
||||
<copyright value="This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at http://loinc.org/terms-of-use"/>
|
||||
<caseSensitive value="false"/>
|
||||
|
||||
<valueSet value=" http://loinc.org/vs"/>
|
||||
<!--
|
||||
for a version specific reference:
|
||||
<valueSet value="http://loinc.org/2.56/vs"/>
|
||||
-->
|
||||
|
||||
<!--
|
||||
It's at the discretion of servers whether to present fragments of LOINC hierarchically or not, when
using the code system resource. But, if they are hierarchical, the hierarchy SHALL be based on the is-a relationship that is derived from the LOINC Multiaxial Hierarchy.
-->
<hierarchyMeaning value="is-a"/>
|
||||
<compositional value="false"/> <!-- no compositional grammar in LOINC -->
|
||||
<versionNeeded value="false"/>
|
||||
|
||||
<!-- this canonical definition of LOINC does not include the content.
|
||||
Servers may choose to include fragments (but not, due to size constraints, all of LOINC) -->
|
||||
<content value="not-present"/>
|
||||
|
||||
<!-- <count value="65000"/>... if working with a specific version, you could nominate a count of the total number of concepts (including the answers, Hierarchy, etc.) -->
|
||||
|
||||
<!--
|
||||
Generally defined filters for specifying value sets
|
||||
In LOINC, all the properties can be used as filters too, but they are not defined explicitly as filters as well.
|
||||
Note that parent/child/ancestor/descendant are defined by FHIR, but repeated here to document them clearly.
|
||||
|
||||
For illustration purposes, consider this slice of the LOINC Multiaxial Hierarchy when reading the descriptions:
|
||||
|
||||
Microbiology [LP31755-9]
|
||||
Microorganism [LP14559-6]
|
||||
Virus [LP14855-8]
|
||||
Zika virus [LP200137-0]
|
||||
Zika virus RNA | XXX [LP203413-2]
|
||||
Zika virus RNA [Presence] in Unspecified specimen by Probe and target amplification method [79190-5]
|
||||
|
||||
Language Note: The filters defined here are specified using the default LOINC language - English (US). Requests are meant to be specified and interpreted on the English version. The return can be in a specified language (if supported by the server). But note that not all filters/properties have language translations available.
|
||||
-->
|
||||
<filter>
|
||||
<code value="parent"/>
|
||||
<description value="Allows for the selection of a set of codes based on their appearance in the LOINC Multiaxial Hierarchy. parent selects immediate children only. For example, the code '79190-5' has the parent 'LP203413-2'"/>
|
||||
<operator value="="/>
|
||||
<value value="A Part code"/>
|
||||
</filter>
|
||||
<filter>
|
||||
<code value="child"/>
|
||||
<description value="Allows for the selection of a set of codes based on their appearance in the LOINC Multiaxial Hierarchy. child selects immediate children only. For example, the code 'LP203413-2' has the child '79190-5'"/>
|
||||
<operator value="in"/>
|
||||
<value value="A comma separated list of Part codes"/>
|
||||
</filter>
|
||||
<filter>
|
||||
<code value="ancestor"/>
|
||||
<description value="Allows for the selection of a set of codes based on their appearance in the LOINC Multiaxial Hierarchy. ancestor includes parents transitively, e.g. 'LP203413-2' eventually has an ancestor 'LP14559-6', so the code '79190-5' is in the set of codes that have ancestor=LP14559-6"/>
|
||||
<operator value="="/>
|
||||
<value value="A Part code"/>
|
||||
</filter>
|
||||
<filter>
|
||||
<code value="descendant"/>
|
||||
<description value="Allows for the selection of a set of codes based on their appearance in the LOINC Multiaxial Hierarchy. descendant includes children transitively, e.g. 'LP14559-6' eventually has a descendant 'LP203413-2', so the code '79190-5' is in the set of codes that have descendant=LP14559-6"/>
|
||||
<operator value="in"/>
|
||||
<value value="A comma separated list of Part codes"/>
|
||||
</filter>
|
||||
<filter>
|
||||
<code value="copyright"/>
|
||||
<description value="Allows for the inclusion or exclusion of LOINC codes that include 3rd party copyright notices. LOINC = only codes with a sole copyright by Regenstrief. 3rdParty = only codes with a 3rd party copyright in addition to the one from Regenstrief"/>
|
||||
<operator value="="/>
|
||||
<value value="LOINC | 3rdParty"/>
|
||||
</filter>
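To make the filter definitions above concrete, here is a small sketch (not part of this commit) that uses the HAPI FHIR R4 structures already referenced elsewhere in this change to build a ValueSet compose entry selecting every LOINC code whose Multiaxial Hierarchy ancestors include the Microorganism part code LP14559-6 from the example hierarchy; the canonical URL is a made-up placeholder.

import org.hl7.fhir.r4.model.ValueSet;

public class LoincAncestorFilterSketch {
	public static ValueSet microorganismCodes() {
		ValueSet vs = new ValueSet();
		vs.setUrl("http://example.org/fhir/ValueSet/loinc-microorganism"); // placeholder URL
		// ancestor=LP14559-6 pulls in 79190-5 and every other code under Microorganism
		vs.getCompose()
			.addInclude()
			.setSystem("http://loinc.org")
			.addFilter()
			.setProperty("ancestor")
			.setOp(ValueSet.FilterOperator.EQUAL)
			.setValue("LP14559-6");
		return vs;
	}
}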
|
||||
<!-- properties. There are 3 kinds of properties:
|
||||
fhir: display, designation; these are not described here since they are inherent in the specification
|
||||
infrastructural: defined by FHIR, but documented here for LOINC
|
||||
LOINC properties: defined by the main LOINC table
|
||||
concept model: defined by the LOINC Multiaxial Hierarchy
|
||||
-->
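Of these, the LOINC table properties listed further down are the ones this commit's LoincHandler copies straight from the CSV columns onto each concept. A minimal sketch of what that produces for one concept follows; the code, display, and property values are taken from the unit test in this commit, while the wrapper class is only illustrative.

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;

public class LoincPropertySketch {
	public static TermConcept exampleConcept() {
		TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
		TermConcept concept = new TermConcept(codeSystemVersion, "10013-1");
		concept.setDisplay("R' wave amplitude in lead I");
		// LOINC table properties become simple name/value pairs on the concept
		concept.addProperty("PROPERTY", "Elpot");
		concept.addProperty("TIME_ASPCT", "Pt");
		return concept;
	}
}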
|
||||
<!-- first, the infrastructural properties - inherited from FHIR, but documented here -->
|
||||
<property>
|
||||
<code value="parent"/>
|
||||
<uri value="http://hl7.org/fhir/concept-properties#parent"/>
|
||||
<description value="A parent code in the Multiaxial Hierarchy"/>
|
||||
<type value=""/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="child"/>
|
||||
<uri value="http://hl7.org/fhir/concept-properties#child"/>
|
||||
<description value="A child code in the Multiaxial Hierarchy"/>
|
||||
<type value=""/>
|
||||
</property>
|
||||
<!--
|
||||
LOINC properties.
|
||||
These apply to the main LOINC codes, but not the Multiaxial Hierarchy, the answer lists, or the part codes.
|
||||
|
||||
Notes:
|
||||
SHORTNAME = display & LONG_COMMON_NAME = definition
|
||||
Properties are specified as type "code", which are LOINC Part codes (LP-).
|
||||
It is anticipated that the LOINC Part codes to be used in these properties will be published in the June 2017 LOINC release.
|
||||
-->
|
||||
<property>
|
||||
<code value="STATUS"/>
|
||||
<uri value="http://loinc.org/property/STATUS"/>
|
||||
<description value="Status of the term. Within LOINC, codes with STATUS=DEPRECATED are considered inactive. Current values: ACTIVE, TRIAL, DISCOURAGED, and DEPRECATED"/>
|
||||
<!-- DV NOTE: changed this from boolean to string -->
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="COMPONENT"/>
|
||||
<uri value="http://loinc.org/property/COMPONENT"/>
|
||||
<description value="First major axis-component or analyte: Analyte Name, Analyte sub-class, Challenge"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="PROPERTY"/>
|
||||
<uri value="http://loinc.org/property/PROPERTY"/>
|
||||
<description value="Second major axis-property observed: Kind of Property (also called kind of quantity)"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="TIME_ASPCT"/>
|
||||
<uri value="http://loinc.org/property/TIME_ASPCT"/>
|
||||
<description value="Third major axis-timing of the measurement: Time Aspect (Point or moment in time vs. time interval)"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="SYSTEM"/>
|
||||
<uri value="http://loinc.org/property/SYSTEM"/>
|
||||
<description value="Fourth major axis-type of specimen or system: System (Sample) Type"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="SCALE_TYP"/>
|
||||
<uri value="http://loinc.org/property/SCALE_TYP"/>
|
||||
<description value="Fifth major axis-scale of measurement: Type of Scale"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="METHOD_TYP"/>
|
||||
<uri value="http://loinc.org/property/METHOD_TYP"/>
|
||||
<description value="Sixth major axis-method of measurement: Type of Method"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="CLASS"/>
|
||||
<uri value="http://loinc.org/property/CLASS"/>
|
||||
<description value="An arbitrary classification of the terms for grouping related observations together"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<!-- Note: removed in 0.3
|
||||
<property>
|
||||
<code value="CHNG_TYPE"/>
|
||||
<uri value="http://loinc.org/property/CHNG_TYPE"/>
|
||||
<description value="A classification of the type of change made to a LOINC term, e.g. DEL=deprecated, ADD=add"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
-->
|
||||
<property>
|
||||
<code value="VersionLastChanged"/>
|
||||
<uri value="http://loinc.org/property/VersionLastChanged"/>
|
||||
<description value="The LOINC version number in which the record has last changed. For new records, this field contains the same value as the FirstPublishedRelease property."/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="CONSUMER_NAME"/>
|
||||
<uri value="http://loinc.org/property/CONSUMER_NAME"/>
|
||||
<description value="An experimental (beta) consumer friendly name for this item. The intent is to provide a test name that health care consumers will recognize; it will be similar to the names that might appear on a lab report"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="CLASSTYPE"/>
|
||||
<uri value="http://loinc.org/property/CLASSTYPE"/>
|
||||
<description value="1=Laboratory class; 2=Clinical class; 3=Claims attachments; 4=Surveys"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="ORDER_OBS"/>
|
||||
<uri value="http://loinc.org/property/ORDER_OBS"/>
|
||||
<description value="Provides users with an idea of the intended use of the term by categorizing it as an order only, observation only, or both"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="HL7_ATTACHMENT_STRUCTURE"/>
|
||||
<uri value="http://loinc.org/property/HL7_ATTACHMENT_STRUCTURE"/>
|
||||
<description value="This property is populated in collaboration with the HL7 Attachments Work Group as described in the HL7 Attachment Specification: Supplement to Consolidated CDA Templated Guide."/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="VersionFirstReleased"/>
|
||||
<uri value="http://loinc.org/property/VersionFirstReleased"/>
|
||||
<description value="This is the LOINC version number in which this LOINC term was first published."/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="PanelType"/>
|
||||
<uri value="http://loinc.org/property/PanelType"/>
|
||||
<description value="For LOINC terms that are panels, this attribute classifies them as a 'Convenience group', 'Organizer', or 'Panel'"/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="ValidHL7AttachmentRequest"/>
|
||||
<uri value="http://loinc.org/property/ValidHL7AttachmentRequest"/>
|
||||
<description value="A value of Y in this field indicates that this LOINC code can be sent by a payer as part of an HL7 Attachment request for additional information."/>
|
||||
<type value="string"/>
|
||||
</property>
|
||||
|
||||
<!-- LOINC/RSNA Radiology Playbook properties. These apply only to terms in the LOINC/RSNA Radiology Playbook File.
|
||||
Notes:
|
||||
Properties are specified as type "code", which are LOINC Part codes (LP-)
|
||||
Converted the attribute names from LOINC style to FHIR style b/c they contained periods
|
||||
Maneuver sub-attributes are being released in 2016 12.
|
||||
-->
|
||||
<property>
|
||||
<code value="rad-modality-modality-type"/>
|
||||
<uri value="http://loinc.org/property/rad-modality-type"/>
|
||||
<description value="Modality is used to represent the device used to acquire imaging information."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-modality-modality-subtype"/>
|
||||
<uri value="http://loinc.org/property/rad-modality-subtype"/>
|
||||
<description value="Modality subtype may be optionally included to signify a particularly common or evocative configuration of the modality."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-anatomic-location-region-imaged"/>
|
||||
<uri value="http://loinc.org/property/rad-anatomic-location-region-imaged"/>
|
||||
<description value="The Anatomic Location Region Imaged attribute is used in two ways: as a coarse-grained descriptor of the area imaged and a grouper for finding related imaging exams; or, it is used just as a grouper."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-anatomic-location-imaging-focus"/>
|
||||
<uri value="http://loinc.org/property/rad-anatomic-location-imaging-focus"/>
|
||||
<description value="The Anatomic Location Imaging Focus is a more fine-grained descriptor of the specific target structure of an imaging exam. In many areas, the focus should be a specific organ."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-anatomic-location-laterality-presence"/>
|
||||
<uri value="http://loinc.org/property/rad-anatomic-location-laterality-presence"/>
|
||||
<description value="Radiology Exams that require laterality to be specified in order to be performed are signified with an Anatomic Location Laterality Presence attribute set to 'True'"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-anatomic-location-laterality"/>
|
||||
<uri value="http://loinc.org/property/rad-anatomic-location-laterality"/>
|
||||
<description value="Radiology exam Laterality is specified as one of: Left, Right, Bilateral, Unilateral, Unspecified"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-view-view-aggregation"/>
|
||||
<uri value="http://loinc.org/property/rad-view-aggregation"/>
|
||||
<description value="Aggregation describes the extent of the imaging performed, whether in quantitative terms (e.g., '3 or more views') or subjective terms (e.g., 'complete')."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-view-view-type"/>
|
||||
<uri value="http://loinc.org/property/rad-view-view-type"/>
|
||||
<description value="View type names specific views, such as 'lateral' or 'AP'."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-maneuver-maneuver-type"/>
|
||||
<uri value="http://loinc.org/property/rad-maneuver-maneuver-type"/>
|
||||
<description value="Maneuver type indicates an action taken with the goal of elucidating or testing a dynamic aspect of the anatomy."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-timing"/>
|
||||
<uri value="http://loinc.org/property/rad-timing"/>
|
||||
<description value="The Timing/Existence property used in conjunction with pharmaceutical and manueuver properties. It specifies whether or not the imaging occurs in the presence of the administered pharmaceutical or a manuever designed to test some dynamic aspect of anatomy or physiology ."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-pharmaceutical-substance-given"/>
|
||||
<uri value="http://loinc.org/property/rad-pharmaceutical-substance-given"/>
|
||||
<description value="The Pharmaceutical Substance Given specifies administered contrast agents, radiopharmaceuticals, medications, or other clinically important agents and challenges during the imaging procedure."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-pharmaceutical-route"/>
|
||||
<uri value="http://loinc.org/property/rad-pharmaceutical-route"/>
|
||||
<description value="Route specifies the route of administration of the pharmeceutical."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-reason-for-exam"/>
|
||||
<uri value="http://loinc.org/property/rad-reason-for-exam"/>
|
||||
<description value="Reason for exam is used to describe a clinical indication or a purpose for the study."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-guidance-for-presence"/>
|
||||
<uri value="http://loinc.org/property/rad-guidance-for-presence"/>
|
||||
<description value="Guidance for.Presence indicates when a procedure is guided by imaging."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-guidance-for-approach"/>
|
||||
<uri value="http://loinc.org/property/rad-guidance-for-approach"/>
|
||||
<description value="Guidance for.Approach refers to the primary route of access used, such as percutaneous, transcatheter, or transhepatic."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-guidance-for-action"/>
|
||||
<uri value="http://loinc.org/property/rad-guidance-for-action"/>
|
||||
<description value="Guidance for.Action indicates the intervention performed, such as biopsy, aspiration, or ablation."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-guidance-for-object"/>
|
||||
<uri value="http://loinc.org/property/rad-guidance-for-object"/>
|
||||
<description value="Guidance for.Object specifies the target of the action, such as mass, abscess or cyst."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="rad-subject"/>
|
||||
<uri value="http://loinc.org/property/rad-subject"/>
|
||||
<description value="Subject is intended for use when there is a need to distinguish between the patient associated with an imaging study, and the target of the study."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<!-- Document Ontology properties. These apply only to terms in the LOINC Document Ontology File
|
||||
Notes:
|
||||
Properties are specified as type "code", which are LOINC Part codes (LP-)
|
||||
Converted the attribute names from LOINC style to FHIR style b/c they contained periods
|
||||
-->
|
||||
<property>
|
||||
<code value="document-kind"/>
|
||||
<uri value="http://loinc.org/property/document-kind"/>
|
||||
<description value="Characterizes the general structure of the document at a macro level."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="document-role"/>
|
||||
<uri value="http://loinc.org/property/document-role"/>
|
||||
<description value="Characterizes the training or professional level of the author of the document, but does not break down to specialty or subspecialty.."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="document-setting"/>
|
||||
<uri value="http://loinc.org/property/document-setting"/>
|
||||
<description value="Setting is a modest extension of CMS’s coarse definition of care settings, such as outpatient, hospital, etc. Setting is not equivalent to location, which typically has more locally defined meanings."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="document-subject-matter-domain"/>
|
||||
<uri value="http://loinc.org/property/document-subject-matter-domain"/>
|
||||
<description value="Characterizes the clinical domain that is the subject of the document. For example, Internal Medicine, Neurology, Physical Therapy, etc."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="document-type-of-service"/>
|
||||
<uri value="http://loinc.org/property/document-type-of-service"/>
|
||||
<description value="Characterizes the kind of service or activity provided to/for the patient (or other subject of the service) that is described in the document."/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<!-- Answer list related properties -->
|
||||
<property>
|
||||
<code value="answer-list"/>
|
||||
<uri value="http://loinc.org/property/answer-list"/>
|
||||
<description value="An answer list associated with this LOINC code (if there are matching answer lists defined). Only on normal LOINC Codes"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<!-- Note: We expect to add an AnswerListType property when LOINC publishes new answer list file format in June 2017 -->
|
||||
<property>
|
||||
<code value="answers-for"/>
|
||||
<uri value="http://loinc.org/property/answers-for"/>
|
||||
<description value="A LOINC Code for which this answer list is used. Only on normal LL- Codes"/>
|
||||
<type value="Coding"/>
|
||||
</property>
|
||||
<!-- Note for future consideration. These are properties of LA codes in the context of a particular list. Not global properties
|
||||
<property>
|
||||
<code value="sequence"/>
|
||||
<uri value="http://loinc.org/property/sequence"/>
|
||||
<description value="Sequence Number of a answer in a set of answers (LA- codes only)"/>
|
||||
<type value="integer"/>
|
||||
</property>
|
||||
<property>
|
||||
<code value="score"/>
|
||||
<uri value="http://loinc.org/property/score"/>
|
||||
<description value="Score assigned to an answer (LA- codes only)"/>
|
||||
<type value="integer"/>
|
||||
</property>
|
||||
-->
|
||||
</CodeSystem>
|
||||
|
|
@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
|
|||
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
|
||||
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.parser.StrictErrorHandler;
|
||||
|
|
|
@ -4,12 +4,12 @@ import static org.junit.Assert.assertNotEquals;
|
|||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.dstu3.model.CodeSystem;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
|
||||
public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
||||
|
@ -18,13 +18,13 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
|||
@AfterClass
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testIndexContained() throws Exception {
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
||||
String input = IOUtils.toString(getClass().getResource("/dstu3_codesystem_complete.json"), StandardCharsets.UTF_8);
|
||||
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);
|
||||
|
|
|
@ -11,10 +11,8 @@ import java.util.*;
|
|||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.dstu3.model.*;
|
||||
import org.hl7.fhir.dstu3.model.Bundle.*;
|
||||
import org.hl7.fhir.dstu3.model.ContactPoint.ContactPointSystem;
|
||||
|
|
|
@ -9,6 +9,7 @@ import static org.junit.Assert.fail;
|
|||
|
||||
import java.util.*;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
|
||||
import org.hl7.fhir.dstu3.model.*;
|
||||
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceCategory;
|
||||
import org.hl7.fhir.dstu3.model.AllergyIntolerance.AllergyIntoleranceClinicalStatus;
|
||||
|
@ -24,7 +25,6 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
|
|||
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.entity.*;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -48,7 +48,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
public void after() {
|
||||
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
|
||||
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
}
|
||||
|
||||
@Before
|
||||
|
@ -67,7 +67,6 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
@ -111,7 +110,6 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "codeA").setDisplay("CodeA");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
@ -149,7 +147,6 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept hello = new TermConcept(cs, "hello").setDisplay("Hello");
|
||||
cs.getConcepts().add(hello);
|
||||
|
@ -486,7 +483,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
|
||||
@Test
|
||||
public void testExpandWithIsAInExternalValueSetReindex() {
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
||||
createExternalCsAndLocalVs();
|
||||
|
||||
|
@ -714,7 +711,6 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
myTermSvc.storeNewCodeSystemVersion(table.getId(), "http://snomed.info/sct", cs);
|
||||
|
|
|
@ -23,12 +23,10 @@ import org.hl7.fhir.dstu3.model.Quantity.QuantityComparator;
|
|||
import org.hl7.fhir.instance.model.api.*;
|
||||
import org.junit.*;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.*;
|
||||
|
|
|
@ -9,7 +9,7 @@ import org.hl7.fhir.r4.model.CodeSystem;
|
|||
import org.junit.AfterClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
|
||||
public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
||||
|
@ -18,13 +18,13 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
|||
@AfterClass
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testIndexContained() throws Exception {
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
||||
String input = IOUtils.toString(getClass().getResource("/r4/codesystem_complete.json"), StandardCharsets.UTF_8);
|
||||
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);
|
||||
|
|
|
@ -9,6 +9,7 @@ import static org.junit.Assert.fail;
|
|||
|
||||
import java.util.*;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.AllergyIntolerance.AllergyIntoleranceCategory;
|
||||
import org.hl7.fhir.r4.model.AllergyIntolerance.AllergyIntoleranceClinicalStatus;
|
||||
|
@ -24,7 +25,6 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
|
|||
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.entity.*;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -48,7 +48,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
public void after() {
|
||||
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
|
||||
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
}
|
||||
|
||||
@Before
|
||||
|
@ -67,7 +67,6 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
@ -111,7 +110,6 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "codeA").setDisplay("CodeA");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
@ -149,7 +147,6 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept hello = new TermConcept(cs, "hello").setDisplay("Hello");
|
||||
cs.getConcepts().add(hello);
|
||||
|
@ -486,7 +483,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
|
||||
@Test
|
||||
public void testExpandWithIsAInExternalValueSetReindex() {
|
||||
BaseHapiTerminologySvc.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
HapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
||||
createExternalCsAndLocalVs();
|
||||
|
||||
|
@ -714,7 +711,6 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
myTermSvc.storeNewCodeSystemVersion(table.getId(), "http://snomed.info/sct", cs);
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
package ca.uhn.fhir.jpa.provider.dstu3;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.stringContainsInOrder;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
|
|
@ -551,7 +551,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
|
|
@ -492,7 +492,6 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test {
|
|||
|
||||
TermCodeSystemVersion cs = new TermCodeSystemVersion();
|
||||
cs.setResource(table);
|
||||
cs.setResourceVersionId(table.getVersion());
|
||||
|
||||
TermConcept parentA = new TermConcept(cs, "ParentA").setDisplay("Parent A");
|
||||
cs.getConcepts().add(parentA);
|
||||
|
|
|
@ -0,0 +1,146 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.anyListOf;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class TerminologyLoaderSvcLoincTest {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcLoincTest.class);
|
||||
private TerminologyLoaderSvc mySvc;
|
||||
|
||||
@Mock
|
||||
private IHapiTerminologySvc myTermSvc;
|
||||
|
||||
@Mock
|
||||
private IHapiTerminologySvcDstu3 myTermSvcDstu3;
|
||||
|
||||
@Captor
|
||||
private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
|
||||
private ArrayList<byte[]> myFiles;
|
||||
@Captor
|
||||
private ArgumentCaptor<CodeSystem> mySystemCaptor;
|
||||
@Mock
|
||||
private RequestDetails details;
|
||||
@Captor
|
||||
private ArgumentCaptor<List<ValueSet>> myValueSetsCaptor;
|
||||
|
||||
|
||||
private void addFile(String theClasspathPrefix, String theClasspathFileName, String theOutputFilename) throws IOException {
|
||||
ByteArrayOutputStream bos;
|
||||
bos = new ByteArrayOutputStream();
|
||||
ZipOutputStream zos = new ZipOutputStream(bos);
|
||||
ourLog.info("Adding {} to test zip", theClasspathFileName);
|
||||
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theOutputFilename));
|
||||
String classpathName = theClasspathPrefix + theClasspathFileName;
|
||||
InputStream stream = getClass().getResourceAsStream(classpathName);
|
||||
Validate.notNull(stream, "Couldn't load " + classpathName);
|
||||
byte[] byteArray = IOUtils.toByteArray(stream);
|
||||
Validate.notNull(byteArray);
|
||||
zos.write(byteArray);
|
||||
zos.closeEntry();
|
||||
zos.close();
|
||||
ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
|
||||
myFiles.add(bos.toByteArray());
|
||||
}
|
||||
|
||||
@Before
|
||||
public void before() {
|
||||
mySvc = new TerminologyLoaderSvc();
|
||||
mySvc.setTermSvcForUnitTests(myTermSvc);
|
||||
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
|
||||
|
||||
myFiles = new ArrayList<>();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLoadLoinc() throws Exception {
|
||||
addFile("/loinc/", "loinc.csv", TerminologyLoaderSvc.LOINC_FILE);
|
||||
addFile("/loinc/", "hierarchy.csv", TerminologyLoaderSvc.LOINC_HIERARCHY_FILE);
|
||||
addFile("/loinc/", "AnswerList_Beta_1.csv", TerminologyLoaderSvc.LOINC_ANSWERLIST_FILE);
|
||||
|
||||
// Actually do the load
|
||||
mySvc.loadLoinc(myFiles, details);
|
||||
|
||||
verify(myTermSvcDstu3, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture());
|
||||
|
||||
TermCodeSystemVersion ver = myCsvCaptor.getValue();
|
||||
|
||||
Map<String, TermConcept> concepts = new HashMap<>();
|
||||
for (TermConcept next : ver.getConcepts()) {
|
||||
concepts.put(next.getCode(), next);
|
||||
}
|
||||
|
||||
// Normal loinc code
|
||||
TermConcept code = concepts.get("10013-1");
|
||||
assertEquals("10013-1", code.getCode());
|
||||
assertEquals("Elpot", code.getProperty("PROPERTY"));
|
||||
assertEquals("Pt", code.getProperty("TIME_ASPCT"));
|
||||
assertEquals("R' wave amplitude in lead I", code.getDisplay());
|
||||
|
||||
// Answer list
|
||||
code = concepts.get("LL1001-8");
|
||||
assertEquals("LL1001-8", code.getCode());
|
||||
assertEquals("PhenX05_14_30D freq amts", code.getDisplay());
|
||||
|
||||
// Answer list code
|
||||
code = concepts.get("LA13834-9");
|
||||
assertEquals("LA13834-9", code.getCode());
|
||||
assertEquals("1-2 times per week", code.getDisplay());
|
||||
assertEquals(3, code.getSequence().intValue());
|
||||
|
||||
// AnswerList valueSet
|
||||
Map<String, ValueSet> valueSets = new HashMap<>();
|
||||
for (ValueSet next : myValueSetsCaptor.getValue()) {
|
||||
valueSets.put(next.getId(), next);
|
||||
}
|
||||
ValueSet vs = valueSets.get("LL1001-8");
|
||||
assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, vs.getIdentifier().get(0).getSystem());
|
||||
assertEquals("LL1001-8", vs.getIdentifier().get(0).getValue());
|
||||
assertEquals("PhenX05_14_30D freq amts", vs.getName());
|
||||
assertEquals("urn:oid:1.3.6.1.4.1.12009.10.1.166", vs.getUrl());
|
||||
assertEquals(1, vs.getCompose().getInclude().size());
|
||||
assertEquals(6, vs.getCompose().getInclude().get(0).getConcept().size());
|
||||
assertEquals(IHapiTerminologyLoaderSvc.LOINC_URL, vs.getCompose().getInclude().get(0).getSystem());
|
||||
assertEquals("LA6270-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
|
||||
assertEquals("Never", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());
|
||||
|
||||
}
|
||||
|
||||
|
||||
@AfterClass
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,92 +1,71 @@
package ca.uhn.fhir.jpa.term;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import java.io.*;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
import org.junit.*;
import org.junit.runner.RunWith;
import org.mockito.*;
import org.mockito.runners.MockitoJUnitRunner;

import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

@RunWith(MockitoJUnitRunner.class)
public class TerminologyLoaderSvcTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcTest.class);
public class TerminologyLoaderSvcSnomedCtTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcSnomedCtTest.class);
private TerminologyLoaderSvc mySvc;

@Mock
private IHapiTerminologySvc myTermSvc;

@Captor
private ArgumentCaptor<TermCodeSystemVersion> myCsvCaptor;
@Mock
private IHapiTerminologySvcDstu3 myTermSvcDstu3;

private void addEntry(ZipOutputStream zos, String theClasspathPrefix, String theFileName) throws IOException {
ourLog.info("Adding {} to test zip", theFileName);
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theFileName));
byte[] byteArray = IOUtils.toByteArray(getClass().getResourceAsStream(theClasspathPrefix + theFileName));
Validate.notNull(byteArray);
zos.write(byteArray);
zos.closeEntry();
}

@Before
public void before() {
mySvc = new TerminologyLoaderSvc();
mySvc.setTermSvcForUnitTests(myTermSvc);
mySvc.setTermSvcDstu3ForUnitTest(myTermSvcDstu3);
}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
private List<byte[]> list(byte[]... theByteArray) {
return new ArrayList<>(Arrays.asList(theByteArray));
}

@Test
public void testLoadLoinc() throws Exception {
ByteArrayOutputStream bos1 = new ByteArrayOutputStream();
ZipOutputStream zos1 = new ZipOutputStream(bos1);
addEntry(zos1, "/loinc/", "loinc.csv");
zos1.close();
ourLog.info("ZIP file has {} bytes", bos1.toByteArray().length);

ByteArrayOutputStream bos2 = new ByteArrayOutputStream();
ZipOutputStream zos2 = new ZipOutputStream(bos2);
addEntry(zos2, "/loinc/", "LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV");
zos2.close();
ourLog.info("ZIP file has {} bytes", bos2.toByteArray().length);

RequestDetails details = mock(RequestDetails.class);
mySvc.loadLoinc(list(bos1.toByteArray(), bos2.toByteArray()), details);

verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class));

TermCodeSystemVersion ver = myCsvCaptor.getValue();
TermConcept code = ver.getConcepts().iterator().next();
assertEquals("10013-1", code.getCode());

}

@Captor
private ArgumentCaptor<String> mySystemCaptor;


/**
* This is just for trying stuff, it won't run without
* local files external to the git repo
*/
@Ignore
@Test
public void testLoadSnomedCtAgainstRealFile() throws Exception {
byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));

RequestDetails details = mock(RequestDetails.class);
mySvc.loadSnomedCt(list(bytes), details);
}


@Test
public void testLoadSnomedCt() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
@@ -105,7 +84,7 @@ public class TerminologyLoaderSvcTest {
RequestDetails details = mock(RequestDetails.class);
mySvc.loadSnomedCt(list(bos.toByteArray()), details);

verify(myTermSvc).storeNewCodeSystemVersion(any(String.class), myCsvCaptor.capture(), any(RequestDetails.class));
verify(myTermSvcDstu3).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyListOf(ValueSet.class));

TermCodeSystemVersion csv = myCsvCaptor.getValue();
TreeSet<String> allCodes = toCodes(csv, true);
@@ -119,25 +98,17 @@ public class TerminologyLoaderSvcTest {
assertThat(allCodes, hasItem("126816002"));
}

private List<byte[]> list(byte[]... theByteArray) {
return new ArrayList<byte[]>(Arrays.asList(theByteArray));
}
/**
* This is just for trying stuff, it won't run without
* local files external to the git repo
*/
@Ignore
@Test
public void testLoadSnomedCtAgainstRealFile() throws Exception {
byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));

private TreeSet<String> toCodes(TermCodeSystemVersion theCsv, boolean theAddChildren) {
TreeSet<String> retVal = new TreeSet<String>();
for (TermConcept next : theCsv.getConcepts()) {
toCodes(retVal, next, theAddChildren);
}
return retVal;
}

private void toCodes(TreeSet<String> theCodes, TermConcept theConcept, boolean theAddChildren) {
theCodes.add(theConcept.getCode());
if (theAddChildren) {
for (TermConceptParentChildLink next : theConcept.getChildren()) {
toCodes(theCodes, next.getChild(), theAddChildren);
}
}
RequestDetails details = mock(RequestDetails.class);
mySvc.loadSnomedCt(list(bytes), details);
}

@Test
@@ -158,13 +129,26 @@ public class TerminologyLoaderSvcTest {
}
}

private void addEntry(ZipOutputStream zos, String theClasspathPrefix, String theFileName) throws IOException {
ourLog.info("Adding {} to test zip", theFileName);
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + theFileName));
byte[] byteArray = IOUtils.toByteArray(getClass().getResourceAsStream(theClasspathPrefix + theFileName));
Validate.notNull(byteArray);
zos.write(byteArray);
zos.closeEntry();
private TreeSet<String> toCodes(TermCodeSystemVersion theCsv, boolean theAddChildren) {
TreeSet<String> retVal = new TreeSet<>();
for (TermConcept next : theCsv.getConcepts()) {
toCodes(retVal, next, theAddChildren);
}
return retVal;
}

private void toCodes(TreeSet<String> theCodes, TermConcept theConcept, boolean theAddChildren) {
theCodes.add(theConcept.getCode());
if (theAddChildren) {
for (TermConceptParentChildLink next : theConcept.getChildren()) {
toCodes(theCodes, next.getChild(), theAddChildren);
}
}
}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}

@@ -1,22 +1,6 @@
package ca.uhn.fhir.jpa.term;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import java.util.List;
import java.util.Set;

import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass;
import org.junit.Test;

import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
@@ -25,10 +9,25 @@ import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

public class TerminologySvcImplTest extends BaseJpaDstu3Test {
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {

private static final String CS_URL = "http://example.com/my_code_system";
private static final String CS_URL_2 = "http://example.com/my_code_system2";


@AfterClass
@@ -48,7 +47,6 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {

TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());

TermConcept parent = new TermConcept();
parent.setCodeSystem(cs);
@@ -159,6 +157,10 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
assertThat(mySystemDao.performReindexingPass(100), greaterThan(0));
}


@Autowired
private ITermCodeSystemDao myTermCodeSystemDao;

private IIdType createCodeSystem() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(CS_URL);
@@ -169,7 +171,6 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {

TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());

TermConcept parentA = new TermConcept(cs, "ParentA");
cs.getConcepts().add(parentA);
@@ -178,9 +179,13 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
parentA.addChild(childAA, RelationshipTypeEnum.ISA);

TermConcept childAAA = new TermConcept(cs, "childAAA");
childAAA.addProperty("propA", "valueAAA");
childAAA.addProperty("propB", "foo");
childAA.addChild(childAAA, RelationshipTypeEnum.ISA);

TermConcept childAAB = new TermConcept(cs, "childAAB");
childAAB.addProperty("propA", "valueAAB");
childAAB.addProperty("propB", "foo");
childAA.addChild(childAAB, RelationshipTypeEnum.ISA);

TermConcept childAB = new TermConcept(cs, "childAB");
@@ -189,10 +194,30 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
TermConcept parentB = new TermConcept(cs, "ParentB");
cs.getConcepts().add(parentB);

myTermSvc.storeNewCodeSystemVersion(table.getId(), "http://foo", cs);
myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, cs);

return id;
}


private IIdType createCodeSystem2() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(CS_URL_2);
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();

ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());

TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);

TermConcept parentA = new TermConcept(cs, "CS2");
cs.getConcepts().add(parentA);

myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL_2, cs);

return id;
}

@Test
public void testFindCodesAbove() {
IIdType id = createCodeSystem();
@@ -214,7 +239,82 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
assertThat(codes, empty());
}


@Test
public void testExpandValueSetPropertySearch() {
createCodeSystem();
createCodeSystem2();

List<String> codes;
ValueSet vs;
ValueSet outcome;
ValueSet.ConceptSetComponent include;

// Property matches one code
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(CS_URL);
include
.addFilter()
.setProperty("propA")
.setOp(ValueSet.FilterOperator.EQUAL)
.setValue("valueAAA");
outcome = myTermSvc.expandValueSet(vs);
codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA"));

// Property matches several codes
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(CS_URL);
include
.addFilter()
.setProperty("propB")
.setOp(ValueSet.FilterOperator.EQUAL)
.setValue("foo");
outcome = myTermSvc.expandValueSet(vs);
codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));

// Property matches no codes
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(CS_URL_2);
include
.addFilter()
.setProperty("propA")
.setOp(ValueSet.FilterOperator.EQUAL)
.setValue("valueAAA");
outcome = myTermSvc.expandValueSet(vs);
codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, empty());

}

@Test
public void testExpandValueSetWholeSystem() {
createCodeSystem();

List<String> codes;

ValueSet vs = new ValueSet();
ValueSet.ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(CS_URL);
ValueSet outcome = myTermSvc.expandValueSet(vs);

codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("ParentA", "childAAA", "childAAB", "childAA", "childAB", "ParentB"));
}

private List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
List<String> retVal = new ArrayList<>();

for (ValueSet.ValueSetExpansionContainsComponent next : theContains) {
retVal.add(next.getCode());
}

return retVal;
}

@Test
public void testCreateDuplicateCodeSystemUri() {
CodeSystem codeSystem = new CodeSystem();
@@ -226,7 +326,6 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {

TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());

myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, cs);

@@ -237,7 +336,6 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
id = myCodeSystemDao.update(codeSystem, null, true, true, mySrd).getId().toUnqualified();
table = myResourceTableDao.findOne(id.getIdPartAsLong());
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());
myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, cs);

// Try to update to a different resource
@@ -247,7 +345,6 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
table = myResourceTableDao.findOne(id.getIdPartAsLong());
cs.setResource(table);
cs.setResourceVersionId(table.getVersion());
try {
myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, cs);
fail();

@@ -0,0 +1,10 @@
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink","AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName","ExtCodeSystem","ExtCodeSystemVersion","ExtCodeSystemCopyrightNotice","SubsequentTextPrompt","Description","Score"
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA6270-8" ,"00" , ,1 ,"Never" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13836-4" ,"01" , ,2 ,"1-3 times per month" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13834-9" ,"02" , ,3 ,"1-2 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , ,
@@ -0,0 +1,11 @@
"LoincNumber","LongCommonName" ,"AnswerListId","AnswerListName" ,"AnswerListLinkType","ApplicableContext"
"10061-0" ,"S' wave amplitude in lead I" ,"LL1311-1" ,"PhenX12_44" ,"EXAMPLE" ,
"10331-7" ,"Rh [Type] in Blood" ,"LL360-9" ,"Pos|Neg" ,"EXAMPLE" ,
"10389-5" ,"Blood product.other [Type]" ,"LL2413-4" ,"Othr bld prod" ,"EXAMPLE" ,
"10390-3" ,"Blood product special preparation [Type]" ,"LL2422-5" ,"Blood prod treatment" ,"EXAMPLE" ,
"10393-7" ,"Factor IX given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10395-2" ,"Factor VIII given [Type]" ,"LL2420-9" ,"Human/Recomb" ,"EXAMPLE" ,
"10401-8" ,"Immune serum globulin given [Type]" ,"LL2421-7" ,"IM/IV" ,"EXAMPLE" ,
"10410-9" ,"Plasma given [Type]" ,"LL2417-5" ,"Plasma type" ,"EXAMPLE" ,
"10568-4" ,"Clarity of Semen" ,"LL2427-4" ,"Clear/Opales/Milky" ,"EXAMPLE" ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]","LL1000-0" ,"PhenX05_13_30D bread amt","NORMATIVE" ,
@@ -0,0 +1,10 @@
PATH_TO_ROOT,SEQUENCE,IMMEDIATE_PARENT,CODE,CODE_TEXT
,1,,LP31755-9,Microbiology
LP31755-9,1,LP31755-9,LP14559-6,Microorganism
LP31755-9.LP14559-6,1,LP14559-6,LP98185-9,Bacteria
LP31755-9.LP14559-6.LP98185-9,1,LP98185-9,LP14082-9,Bacteria
LP31755-9.LP14559-6.LP98185-9.LP14082-9,1,LP14082-9,LP52258-8,Bacteria | Body Fluid
LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52258-8,1,LP52258-8,41599-2,Bacteria Fld Ql Micro
LP31755-9.LP14559-6.LP98185-9.LP14082-9,2,LP14082-9,LP52260-4,Bacteria | Cerebral spinal fluid
LP31755-9.LP14559-6.LP98185-9.LP14082-9.LP52260-4,1,LP52260-4,41602-4,Bacteria CSF Ql Micro
LP31755-9.LP14559-6.LP98185-9.LP14082-9,3,LP14082-9,LP52960-9,Bacteria | Cervix
@@ -1,10 +1,11 @@
"LOINC_NUM","COMPONENT","PROPERTY","TIME_ASPCT","SYSTEM","SCALE_TYP","METHOD_TYP","CLASS","SOURCE","VersionLastChanged","CHNG_TYPE","DefinitionDescription","STATUS","CONSUMER_NAME","CLASSTYPE","FORMULA","SPECIES","EXMPL_ANSWERS","SURVEY_QUEST_TEXT","SURVEY_QUEST_SRC","UNITSREQUIRED","SUBMITTED_UNITS","RELATEDNAMES2","SHORTNAME","ORDER_OBS","CDISC_COMMON_TESTS","HL7_FIELD_SUBFIELD_ID","EXTERNAL_COPYRIGHT_NOTICE","EXAMPLE_UNITS","LONG_COMMON_NAME","UnitsAndRange","DOCUMENT_SECTION","EXAMPLE_UCUM_UNITS","EXAMPLE_SI_UCUM_UNITS","STATUS_REASON","STATUS_TEXT","CHANGE_REASON_PUBLIC","COMMON_TEST_RANK","COMMON_ORDER_RANK","COMMON_SI_TEST_RANK","HL7_ATTACHMENT_STRUCTURE","EXTERNAL_COPYRIGHT_LINK","PanelType","AskAtOrderEntry","AssociatedObservations"
"10013-1","R' wave amplitude.lead I","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-I; R wave Amp L-I; Random; Right; Voltage","R' wave Amp L-I","Observation",,,,"mV","R' wave amplitude in lead I",,,"mV",,,,,0,0,0,,,,,
"10014-9","R' wave amplitude.lead II","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"2; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-II; R wave Amp L-II; Random; Right; Voltage","R' wave Amp L-II","Observation",,,,"mV","R' wave amplitude in lead II",,,"mV",,,,,0,0,0,,,,,
"10015-6","R' wave amplitude.lead III","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"3; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-III; R wave Amp L-III; Random; Right; Voltage","R' wave Amp L-III","Observation",,,,"mV","R' wave amplitude in lead III",,,"mV",,,,,0,0,0,,,,,
"10016-4","R' wave amplitude.lead V1","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V1; R wave Amp L-V1; Random; Right; Voltage","R' wave Amp L-V1","Observation",,,,"mV","R' wave amplitude in lead V1",,,"mV",,,,,0,0,0,,,,,
"1001-7","DBG Ab","Pr","Pt","Ser/Plas^donor","Ord",,"BLDBK","FS","2.44","MIN",,"ACTIVE",,1,,,,,,,,"ABS; Aby; Antby; Anti; Antibodies; Antibody; Autoantibodies; Autoantibody; BLOOD BANK; Donna Bennett-Goodspeed; Donr; Ordinal; Pl; Plasma; Plsm; Point in time; QL; Qual; Qualitative; Random; Screen; SerP; SerPl; SerPl^donor; SerPlas; Serum; Serum or plasma; SR","DBG Ab SerPl Donr Ql","Observation",,,,,"DBG Ab [Presence] in Serum or Plasma from donor",,,,,,,"The Property has been changed from ACnc to Pr (Presence) to reflect the new model for ordinal terms where results are based on presence or absence.",0,0,0,,,,,
"10017-2","R' wave amplitude.lead V2","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V2; R wave Amp L-V2; Random; Right; Voltage","R' wave Amp L-V2","Observation",,,,"mV","R' wave amplitude in lead V2",,,"mV",,,,,0,0,0,,,,,
"10018-0","R' wave amplitude.lead V3","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V3; R wave Amp L-V3; Random; Right; Voltage","R' wave Amp L-V3","Observation",,,,"mV","R' wave amplitude in lead V3",,,"mV",,,,,0,0,0,,,,,
"10019-8","R' wave amplitude.lead V4","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage","R' wave Amp L-V4","Observation",,,,"mV","R' wave amplitude in lead V4",,,"mV",,,,,0,0,0,,,,,
"10020-6","R' wave amplitude.lead V5","Elpot","Pt","Heart","Qn","EKG","EKG.MEAS","CH","2.48","MIN",,"ACTIVE",,2,,,,,,"Y",,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage","R' wave Amp L-V5","Observation",,,,"mV","R' wave amplitude in lead V5",,,"mV",,,,,0,0,0,,,,,
"LOINC_NUM","COMPONENT" ,"PROPERTY","TIME_ASPCT","SYSTEM" ,"SCALE_TYP","METHOD_TYP","CLASS" ,"SOURCE","VersionLastChanged","CHNG_TYPE","DefinitionDescription","STATUS","CONSUMER_NAME","CLASSTYPE","FORMULA","SPECIES","EXMPL_ANSWERS","SURVEY_QUEST_TEXT" ,"SURVEY_QUEST_SRC" ,"UNITSREQUIRED","SUBMITTED_UNITS","RELATEDNAMES2" ,"SHORTNAME" ,"ORDER_OBS" ,"CDISC_COMMON_TESTS","HL7_FIELD_SUBFIELD_ID","EXTERNAL_COPYRIGHT_NOTICE","EXAMPLE_UNITS","LONG_COMMON_NAME" ,"UnitsAndRange","DOCUMENT_SECTION","EXAMPLE_UCUM_UNITS","EXAMPLE_SI_UCUM_UNITS","STATUS_REASON","STATUS_TEXT","CHANGE_REASON_PUBLIC" ,"COMMON_TEST_RANK","COMMON_ORDER_RANK","COMMON_SI_TEST_RANK","HL7_ATTACHMENT_STRUCTURE","EXTERNAL_COPYRIGHT_LINK","PanelType","AskAtOrderEntry","AssociatedObservations"
"10013-1" ,"R' wave amplitude.lead I" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-I; R wave Amp L-I; Random; Right; Voltage" ,"R' wave Amp L-I" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead I" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10014-9" ,"R' wave amplitude.lead II" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"2; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-II; R wave Amp L-II; Random; Right; Voltage" ,"R' wave Amp L-II" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead II" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10015-6" ,"R' wave amplitude.lead III" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"3; Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-III; R wave Amp L-III; Random; Right; Voltage" ,"R' wave Amp L-III" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead III" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10016-4" ,"R' wave amplitude.lead V1" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V1; R wave Amp L-V1; Random; Right; Voltage" ,"R' wave Amp L-V1" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V1" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"1001-7" ,"DBG Ab" ,"Pr" ,"Pt" ,"Ser/Plas^donor","Ord" , ,"BLDBK" ,"FS" ,"2.44" ,"MIN" , ,"ACTIVE", ,1 , , , , , , , ,"ABS; Aby; Antby; Anti; Antibodies; Antibody; Autoantibodies; Autoantibody; BLOOD BANK; Donna Bennett-Goodspeed; Donr; Ordinal; Pl; Plasma; Plsm; Point in time; QL; Qual; Qualitative; Random; Screen; SerP; SerPl; SerPl^donor; SerPlas; Serum; Serum or plasma; SR","DBG Ab SerPl Donr Ql" ,"Observation", , , , ,"DBG Ab [Presence] in Serum or Plasma from donor" , , , , , , ,"The Property has been changed from ACnc to Pr (Presence) to reflect the new model for ordinal terms where results are based on presence or absence.",0 ,0 ,0 , , , , ,
"10017-2" ,"R' wave amplitude.lead V2" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V2; R wave Amp L-V2; Random; Right; Voltage" ,"R' wave Amp L-V2" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V2" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10018-0" ,"R' wave amplitude.lead V3" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V3; R wave Amp L-V3; Random; Right; Voltage" ,"R' wave Amp L-V3" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V3" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10019-8" ,"R' wave amplitude.lead V4" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V4; R wave Amp L-V4; Random; Right; Voltage" ,"R' wave Amp L-V4" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V4" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"10020-6" ,"R' wave amplitude.lead V5" ,"Elpot" ,"Pt" ,"Heart" ,"Qn" ,"EKG" ,"EKG.MEAS","CH" ,"2.48" ,"MIN" , ,"ACTIVE", ,2 , , , , , ,"Y" , ,"Cardiac; ECG; EKG.MEASUREMENTS; Electrical potential; Electrocardiogram; Electrocardiograph; Hrt; Painter's colic; PB; Plumbism; Point in time; QNT; Quan; Quant; Quantitative; R prime; R' wave Amp L-V5; R wave Amp L-V5; Random; Right; Voltage" ,"R' wave Amp L-V5" ,"Observation", , , ,"mV" ,"R' wave amplitude in lead V5" , , ,"mV" , , , , ,0 ,0 ,0 , , , , ,
"61438-8" ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30D","Find" ,"Pt" ,"^Patient" ,"Ord" ,"PhenX" ,"PHENX" ,"PhenX" ,"2.44" ,"MIN" , ,"TRIAL" , ,2 , , , ,"Each time you eat bread, toast or dinner rolls, how much do you usually eat?","PhenX.050201100100","N" , ,"Finding; Findings; How much bread in 30D; Last; Ordinal; Point in time; QL; Qual; Qualitative; Random; Screen" ,"How much bread in 30D PhenX", , , , , ,"Each time you ate bread, toast or dinner rolls, how much did you usually eat in the past 30 days [PhenX]", , , , , , , ,0 ,0 ,0 , , , , ,