Work on term svc

This commit is contained in:
jamesagnew 2016-05-24 06:34:06 -04:00
parent ee031667c5
commit efe9cd1dd1
22 changed files with 411 additions and 109 deletions

View File

@ -34,24 +34,30 @@ public enum FhirVersionEnum {
* ***********************
*/
DSTU1("ca.uhn.fhir.model.dstu.FhirDstu1", null, false),
DSTU1("ca.uhn.fhir.model.dstu.FhirDstu1", null, false, "0.0.82"),
DSTU2("ca.uhn.fhir.model.dstu2.FhirDstu2", null, false),
DSTU2("ca.uhn.fhir.model.dstu2.FhirDstu2", null, false, "1.0.2"),
DSTU3("org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3", null, true),
DSTU2_HL7ORG("org.hl7.fhir.instance.FhirDstu2Hl7Org", DSTU2, true, "1.0.2"),
DSTU2_HL7ORG("org.hl7.fhir.instance.FhirDstu2Hl7Org", DSTU2, true);
DSTU3("org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3", null, true, "1.4.0");
private final FhirVersionEnum myEquivalent;
private final String myFhirVersionString;
private final boolean myIsRi;
private volatile Boolean myPresentOnClasspath;
private final String myVersionClass;
private volatile IFhirVersion myVersionImplementation;
FhirVersionEnum(String theVersionClass, FhirVersionEnum theEquivalent, boolean theIsRi) {
/**
 * @param theVersionClass fully-qualified class name of the IFhirVersion implementation for this constant
 * @param theEquivalent another constant this version is equivalent to, or null
 * @param theIsRi whether this version is backed by the HL7.org ("RI") structures — true for DSTU2_HL7ORG/DSTU3 per the constant declarations above
 * @param theFhirVersion the official FHIR version string for this constant (e.g. "1.0.2")
 */
FhirVersionEnum(String theVersionClass, FhirVersionEnum theEquivalent, boolean theIsRi, String theFhirVersion) {
myVersionClass = theVersionClass;
myEquivalent = theEquivalent;
myIsRi = theIsRi;
myFhirVersionString = theFhirVersion;
}
/**
 * Returns the official FHIR version string supplied to the enum constructor
 * (e.g. "1.0.2" for DSTU2, "1.4.0" for DSTU3).
 */
public String getFhirVersionString() {
return myFhirVersionString;
}
public IFhirVersion getVersionImplementation() {

View File

@ -30,6 +30,8 @@ public class HttpClientUtil {
b.append("HAPI-FHIR/");
b.append(VersionUtil.getVersion());
b.append(" (FHIR Client; FHIR ");
b.append(theContext.getVersion().getVersion().getFhirVersionString());
b.append('/');
b.append(theContext.getVersion().getVersion().name());
b.append("; ");
b.append(theClientType);

View File

@ -152,7 +152,15 @@ public class RestfulServer extends HttpServlet implements IRestfulServer<Servlet
* </p>
*/
public void addHeadersToResponse(HttpServletResponse theHttpResponse) {
theHttpResponse.addHeader("X-Powered-By", "HAPI FHIR " + VersionUtil.getVersion() + " REST Server (FHIR Server; FHIR " + myFhirContext.getVersion().getVersion().name() + ")");
StringBuilder b = new StringBuilder();
b.append("HAPI FHIR ");
b.append(VersionUtil.getVersion());
b.append(" REST Server (FHIR Server; FHIR ");
b.append(myFhirContext.getVersion().getVersion().getFhirVersionString());
b.append('/');
b.append(myFhirContext.getVersion().getVersion().name());
b.append(")");
theHttpResponse.addHeader("X-Powered-By", b.toString());
}
private void addLocationHeader(RequestDetails theRequest, HttpServletResponse theResponse, MethodOutcome response, String headerLocation, String resourceName) {

View File

@ -48,6 +48,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.method.RequestDetails;
@ -199,7 +200,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3<CodeSys
termConcept.setCode(next.getCode());
termConcept.setCodeSystem(theCodeSystemVersion);
termConcept.setDisplay(next.getDisplay());
termConcept.addChildren(toPersistedConcepts(next.getConcept(), theCodeSystemVersion));
termConcept.addChildren(toPersistedConcepts(next.getConcept(), theCodeSystemVersion), RelationshipTypeEnum.ISA);
retVal.add(termConcept);
}
}

View File

@ -61,8 +61,10 @@ public class ForcedId {
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable=false)
private Long myResourcePid;
// This is updatable=true because it was added in 1.6 and needs to be set. At some
// point we should remove the default and make it not updatable.
@ColumnDefault("''")
@Column(name = "RESOURCE_TYPE", nullable = true, length = 100, updatable = false)
@Column(name = "RESOURCE_TYPE", nullable = true, length = 100, updatable = true)
private String myResourceType;
/**

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.entity;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
* #%L
* HAPI FHIR JPA Server
@ -43,12 +45,18 @@ import javax.persistence.UniqueConstraint;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
@Entity
@Table(name="TRM_CONCEPT", uniqueConstraints= {
@UniqueConstraint(name="IDX_CONCEPT_CS_CODE", columnNames= {"CODESYSTEM_PID", "CODE"})
})
public class TermConcept implements Serializable {
private static final int MAX_DESC_LENGTH = 400;
private static final long serialVersionUID = 1L;
@OneToMany(fetch=FetchType.LAZY, mappedBy="myParent")
@ -61,7 +69,7 @@ public class TermConcept implements Serializable {
@JoinColumn(name="CODESYSTEM_PID", referencedColumnName="PID", foreignKey=@ForeignKey(name="FK_CONCEPT_PID_CS_PID"))
private TermCodeSystemVersion myCodeSystem;
@Column(name="DISPLAY", length=200, nullable=true)
@Column(name="DISPLAY", length=MAX_DESC_LENGTH, nullable=true)
private String myDisplay;
@OneToMany(cascade=CascadeType.ALL, fetch=FetchType.LAZY, mappedBy="myChild")
@ -82,12 +90,12 @@ public class TermConcept implements Serializable {
setCode(theCode);
}
public TermConcept addChild(TermConcept theChild) {
Validate.notNull(theChild.getCodeSystem(), "theChild.getCodeSystem() must not return null");
public TermConcept addChild(TermConcept theChild, RelationshipTypeEnum theRelationshipType) {
Validate.notNull(theRelationshipType, "theRelationshipType must not be null");
TermConceptParentChildLink link = new TermConceptParentChildLink();
link.setParent(this);
link.setCodeSystem(theChild.getCodeSystem());
link.setChild(theChild);
link.setRelationshipType(theRelationshipType);
getChildren().add(link);
return this;
}
@ -111,6 +119,11 @@ public class TermConcept implements Serializable {
return b.isEquals();
}
@Override
public String toString() {
// Compact, log-friendly form, e.g. "TermConcept[code=...,display=...]"
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("code", myCode).append("display", myDisplay).build();
}
public Collection<TermConceptParentChildLink> getChildren() {
if (myChildren == null) {
myChildren = new ArrayList<TermConceptParentChildLink>();
@ -154,11 +167,14 @@ public class TermConcept implements Serializable {
public void setDisplay(String theDisplay) {
myDisplay = theDisplay;
// DISPLAY column is only MAX_DESC_LENGTH (400) chars wide — silently truncate longer values
if (isNotBlank(theDisplay) && theDisplay.length() > MAX_DESC_LENGTH) {
myDisplay = myDisplay.substring(0, MAX_DESC_LENGTH);
}
}
public void addChildren(List<TermConcept> theChildren) {
public void addChildren(List<TermConcept> theChildren, RelationshipTypeEnum theRelationshipType) {
for (TermConcept next : theChildren) {
addChild(next);
addChild(next, theRelationshipType);
}
}

View File

@ -24,6 +24,8 @@ import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
@ -33,6 +35,8 @@ import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
@Entity
@Table(name="TRM_CONCEPT_PC_LINK")
public class TermConceptParentChildLink implements Serializable {
@ -56,6 +60,10 @@ public class TermConceptParentChildLink implements Serializable {
@Column(name="PID")
private Long myPid;
@Enumerated(EnumType.ORDINAL)
@Column(name="REL_TYPE", length=5, nullable=true)
private RelationshipTypeEnum myRelationshipType;
public TermConcept getChild() {
return myChild;
}
@ -80,4 +88,13 @@ public class TermConceptParentChildLink implements Serializable {
myParent = theParent;
}
public void setRelationshipType(RelationshipTypeEnum theRelationshipType) {
myRelationshipType = theRelationshipType;
}
public enum RelationshipTypeEnum{
ISA
}
}

View File

@ -36,6 +36,7 @@ import com.google.common.base.Stopwatch;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@ -44,6 +45,7 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
@ -52,9 +54,9 @@ import ca.uhn.fhir.util.ValidateUtil;
public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiTerminologySvc.class);
private static final Object PLACEHOLDER_OBJECT = new Object();
@Autowired
private ITermCodeSystemDao myCodeSystemDao;
protected ITermCodeSystemDao myCodeSystemDao;
@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
@ -169,6 +171,10 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
return;
}
if (theConceptsStack.size() % 10000 == 0) {
ourLog.info("Have saved {} concepts",theConceptsStack.size());
}
for (TermConceptParentChildLink next : theConcept.getChildren()) {
persistChildren(next.getChild(), theCodeSystem, theConceptsStack);
}
@ -204,17 +210,25 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
}
}
ourLog.info("Validating code system");
// Validate the code system
IdentityHashMap<TermConcept, Object> conceptsStack = new IdentityHashMap<TermConcept, Object>();
for (TermConcept next : theCodeSystem.getConcepts()) {
validateConceptForStorage(next, theCodeSystem, conceptsStack);
}
ourLog.info("Saving version");
myCodeSystemVersionDao.save(theCodeSystem);
ourLog.info("Saving code system");
codeSystem.setCurrentVersion(theCodeSystem);
myCodeSystemDao.save(codeSystem);
ourLog.info("Saving concepts...");
conceptsStack = new IdentityHashMap<TermConcept, Object>();
for (TermConcept next : theCodeSystem.getConcepts()) {
persistChildren(next, theCodeSystem, conceptsStack);
@ -244,10 +258,12 @@ public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
}
for (TermConceptParentChildLink next : theConcept.getChildren()) {
next.setCodeSystem(theCodeSystem);
validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack);
}
theConceptsStack.remove(theConcept);
}
}

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term;
import java.util.List;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.method.RequestDetails;
public class HapiTerminologySvcDstu1 extends BaseHapiTerminologySvc {
@ -32,7 +33,7 @@ public class HapiTerminologySvcDstu1 extends BaseHapiTerminologySvc {
}
@Override
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion) {
// Terminology storage is not supported for DSTU1 servers
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
throw new UnsupportedOperationException();
}

View File

@ -26,6 +26,7 @@ import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.method.RequestDetails;
public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvc {
@ -40,8 +41,10 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvc {
@Override
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion) {
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
// nothing yet — intentional no-op
// NOTE(review): DSTU2 terminology storage appears unimplemented; confirm a silent no-op (vs. UnsupportedOperationException as in DSTU1) is intended
}
}

View File

@ -24,48 +24,72 @@ import java.util.ArrayList;
import java.util.List;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.dstu3.utils.IWorkerContext;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.UrlUtil;
public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcDstu3.class);
@Autowired
private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemResourceDao;
@Autowired
private IWorkerContext myWorkerContext;
@Autowired
private ValueSetExpander myValueSetExpander;
@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
// Build a throwaway ValueSet whose compose imports the requested URI, then delegate
// the actual expansion to the injected ValueSetExpander
ValueSet source = new ValueSet();
source.getCompose().addImport(theValueSet);
try {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
ValueSetExpansionOutcome outcome = myValueSetExpander.expand(source);
// Flatten the expansion into simple system/code pairs
for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
}
return retVal;
} catch (Exception e) {
// Broad catch: any expansion failure is rewrapped as an internal server error
throw new InternalErrorException(e);
}
}
@Override
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion) {
public void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails) {
// Conditionally create a placeholder CodeSystem resource (matched on url) whose
// content is flagged NOTPRESENT — the actual concepts are stored via the term service
CodeSystem cs = new org.hl7.fhir.dstu3.model.CodeSystem();
cs.setUrl(theSystem);
cs.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType csId = myCodeSystemResourceDao.create(cs, "CodeSystem?url=" + UrlUtil.escape(theSystem), theRequestDetails).getId().toUnqualifiedVersionless();
ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId);
Long codeSystemResourcePid = resource.getId();
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
// Link the new code system version entity to its backing resource row before persisting
theCodeSystemVersion.setResource(resource);
theCodeSystemVersion.setResourceVersionId(resource.getVersion());
super.storeNewCodeSystemVersion(codeSystemResourcePid, theSystem, theCodeSystemVersion);
}
}

View File

@ -26,6 +26,7 @@ import java.util.Set;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.method.RequestDetails;
public interface IHapiTerminologySvc {
@ -43,6 +44,6 @@ public interface IHapiTerminologySvc {
List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode);
void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion);
void storeNewCodeSystemVersion(String theSystem, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails);
}

View File

@ -34,19 +34,23 @@ import java.io.OutputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.annotations.VisibleForTesting;
@ -54,9 +58,15 @@ import com.google.common.annotations.VisibleForTesting;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
public class TerminologyLoaderSvc {
static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full";
static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
@Autowired
@ -67,10 +77,8 @@ public class TerminologyLoaderSvc {
myTermSvc = theTermSvc;
}
public void loadSnomedCt(byte[] theZipBytes) {
String filenameDescription = "Terminology/sct2_Description_Full";
String filenameRelationship = "Terminology/sct2_Relationship_Full";
List<String> allFilenames = Arrays.asList(filenameDescription, filenameRelationship);
public void loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails) {
List<String> allFilenames = Arrays.asList(SCT_FILE_DESCRIPTION, SCT_FILE_RELATIONSHIP, SCT_FILE_CONCEPT);
Map<String, File> filenameToFile = new HashMap<String, File>();
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new ByteArrayInputStream(theZipBytes)));
@ -87,7 +95,6 @@ public class TerminologyLoaderSvc {
if (!want) {
ourLog.info("Ignoring zip entry: {}", nextEntry.getName());
IOUtils.copy(inputStream, new SinkOutputStream());
continue;
}
@ -95,9 +102,9 @@ public class TerminologyLoaderSvc {
File nextOutFile = File.createTempFile("hapi_fhir", ".csv");
nextOutFile.deleteOnExit();
OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(nextOutFile, false));
OutputStream outputStream = new SinkOutputStream(new FileOutputStream(nextOutFile, false), nextEntry.getName());
try {
IOUtils.copy(inputStream, outputStream);
IOUtils.copyLarge(inputStream, outputStream);
} finally {
IOUtils.closeQuietly(outputStream);
}
@ -112,71 +119,55 @@ public class TerminologyLoaderSvc {
ourLog.info("Beginning SNOMED CT processing");
processSnomedCtFiles(filenameToFile,theRequestDetails);
}
void processSnomedCtFiles(Map<String, File> filenameToFile, RequestDetails theRequestDetails) {
final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
final Set<String> validConceptIds = new HashSet<String>();
final List<TermConceptParentChildLink> links = new ArrayList<TermConceptParentChildLink>();
IRecordHandler handler = new IRecordHandler() {
@Override
public void accept(CSVRecord theRecord) {
String id = theRecord.get("id");
boolean active = "1".equals(theRecord.get("active"));
if (!active) {
return;
}
String conceptId = theRecord.get("conceptId");
String term = theRecord.get("term");
IRecordHandler handler = new SctHandlerConcept(validConceptIds);
iterateOverZipFile(filenameToFile, SCT_FILE_CONCEPT, handler);
TermConcept concept = getOrCreateConcept(codeSystemVersion, id2concept, id);
concept.setCode(conceptId);
concept.setDisplay(term);
code2concept.put(conceptId, concept);
}
};
iterateOverZipFile(filenameToFile, filenameDescription, handler);
ourLog.info("Have {} valid concept IDs", validConceptIds.size());
handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
iterateOverZipFile(filenameToFile, SCT_FILE_DESCRIPTION, handler);
final HashSet<TermConcept> rootConcepts = new HashSet<TermConcept>();
rootConcepts.addAll(code2concept.values());
handler = new IRecordHandler() {
@Override
public void accept(CSVRecord theRecord) {
String sourceId = theRecord.get("sourceId");
String destinationId = theRecord.get("destinationId");
String typeId = theRecord.get("typeId");
boolean active = "1".equals(theRecord.get("active"));
if (!active) {
return;
}
TermConcept typeConcept = findConcept(code2concept, typeId);
TermConcept sourceConcept = findConcept(code2concept, sourceId);
TermConcept targetConcept = findConcept(code2concept, destinationId);
if (typeConcept.getDisplay().equals("Is a")) {
TermConceptParentChildLink link = new TermConceptParentChildLink();
link.setChild(sourceConcept);
link.setParent(targetConcept);
link.setCodeSystem(codeSystemVersion);
rootConcepts.remove(link.getChild());
} else {
ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay());
}
}
private TermConcept findConcept(final Map<String, TermConcept> code2concept, String typeId) {
TermConcept typeConcept = code2concept.get(typeId);
if (typeConcept == null) {
throw new InternalErrorException("Unknown type ID: " + typeId);
}
return typeConcept;
}
};
iterateOverZipFile(filenameToFile, filenameRelationship, handler);
ourLog.info("Got {} concepts, cloning map", code2concept.size());
final HashMap<String, TermConcept> rootConcepts = new HashMap<String, TermConcept>(code2concept);
handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
iterateOverZipFile(filenameToFile, SCT_FILE_RELATIONSHIP, handler);
ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size());
codeSystemVersion.getConcepts().addAll(rootConcepts);
myTermSvc.storeNewCodeSystemVersion("http://snomed.info/sct", codeSystemVersion);
for (TermConcept next : rootConcepts.values()){
dropCircularRefs(next, new HashSet<String>());
}
codeSystemVersion.getConcepts().addAll(rootConcepts.values());
myTermSvc.storeNewCodeSystemVersion("http://snomed.info/sct", codeSystemVersion, theRequestDetails);
}
/**
 * Recursively walks theConcept's children and removes any parent-child link that
 * would introduce a cycle, i.e. where the child's code already appears in theChain,
 * the set of ancestor codes accumulated along the current traversal path.
 */
private void dropCircularRefs(TermConcept theConcept, HashSet<String> theChain) {
for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext(); ) {
TermConceptParentChildLink next = childIter.next();
TermConcept nextChild = next.getChild();
if (theChain.contains(nextChild.getCode())) {
// Cycle detected — unlink via Iterator.remove to avoid ConcurrentModificationException
ourLog.info("Removing circular reference code {} from parent {}", nextChild.getCode(), theConcept.getCode());
childIter.remove();
} else {
// Push current code, descend, then pop so sibling subtrees see the correct chain
theChain.add(theConcept.getCode());
dropCircularRefs(nextChild, theChain);
theChain.remove(theConcept.getCode());
}
}
}
private void iterateOverZipFile(Map<String, File> theFilenameToFile, String fileNamePart, IRecordHandler handler) {
@ -193,9 +184,17 @@ public class TerminologyLoaderSvc {
Iterator<CSVRecord> iter = parsed.iterator();
ourLog.debug("Header map: {}", parsed.getHeaderMap());
int count = 0;
int logIncrement = 100000;
int nextLoggedCount = logIncrement;
while (iter.hasNext()) {
CSVRecord nextRecord = iter.next();
handler.accept(nextRecord);
count++;
if (count >= nextLoggedCount) {
ourLog.info(" * Processed {} records in {}", count, fileNamePart);
nextLoggedCount += logIncrement;
}
}
} catch (IOException e) {
throw new InternalErrorException(e);
@ -217,6 +216,118 @@ public class TerminologyLoaderSvc {
return concept;
}
/**
 * Handles rows of the SNOMED CT RF2 Relationship file. Active "Is a" relationships
 * become parent-child links between already-loaded concepts; known attribute
 * relationship types are skipped; anything else is silently ignored.
 */
private final class SctHandlerRelationship implements IRecordHandler {
	private final TermCodeSystemVersion myCodeSystemVersion;
	private final Map<String, TermConcept> myRootConcepts;
	private final Map<String, TermConcept> myCode2concept;
	/*
	 * Relationship-type display names we deliberately skip: these describe attribute
	 * relationships, not the "is a" hierarchy being persisted. Built once per handler
	 * instead of being rebuilt for every CSV record (the original allocated this set
	 * inside accept(), i.e. once per row of a very large file).
	 */
	private final Set<String> myIgnoredTypes = new HashSet<String>(Arrays.asList(
			"Method (attribute)",
			"Direct device (attribute)",
			"Has focus (attribute)",
			"Access instrument",
			"Procedure site (attribute)",
			"Causative agent (attribute)",
			"Course (attribute)",
			"Finding site (attribute)",
			"Has definitional manifestation (attribute)"));

	private SctHandlerRelationship(TermCodeSystemVersion theCodeSystemVersion, HashMap<String,TermConcept> theRootConcepts, Map<String, TermConcept> theCode2concept) {
		myCodeSystemVersion = theCodeSystemVersion;
		myRootConcepts = theRootConcepts;
		myCode2concept = theCode2concept;
	}

	@Override
	public void accept(CSVRecord theRecord) {
		String sourceId = theRecord.get("sourceId");
		String destinationId = theRecord.get("destinationId");
		String typeId = theRecord.get("typeId");
		boolean active = "1".equals(theRecord.get("active"));
		if (!active) {
			return;
		}
		TermConcept typeConcept = findConcept(myCode2concept, typeId);
		TermConcept sourceConcept = findConcept(myCode2concept, sourceId);
		TermConcept targetConcept = findConcept(myCode2concept, destinationId);
		if (typeConcept.getDisplay().equals("Is a (attribute)")) {
			// Source becomes a child of target, so it can no longer be a root concept.
			// (The original also built a fully-populated TermConceptParentChildLink here
			// and then discarded it — addChild() constructs its own link internally.)
			myRootConcepts.remove(sourceConcept.getCode());
			targetConcept.addChild(sourceConcept, RelationshipTypeEnum.ISA);
		} else if (myIgnoredTypes.contains(typeConcept.getDisplay())) {
			// known attribute relationship — intentionally ignored
		} else {
			// unknown relationship type — previously logged at WARN, now intentionally silent
		}
	}

	// Looks up a concept by code, failing hard on an unknown ID so bad data is not silently dropped
	private TermConcept findConcept(final Map<String, TermConcept> code2concept, String typeId) {
		TermConcept typeConcept = code2concept.get(typeId);
		if (typeConcept == null) {
			throw new InternalErrorException("Unknown type ID: " + typeId);
		}
		return typeConcept;
	}
}
/**
 * Handles rows of the SNOMED CT RF2 Description file: for each active row whose
 * concept was accepted during the concept pass, creates/updates the concept's
 * code and display text and indexes it by concept ID.
 */
private final class SctHandlerDescription implements IRecordHandler {
	private final Map<String, TermConcept> myCode2concept;
	private final Map<String, TermConcept> myId2concept;
	private final TermCodeSystemVersion myCodeSystemVersion;
	private Set<String> myValidConceptIds;

	private SctHandlerDescription(Set<String> theValidConceptIds, Map<String, TermConcept> theCode2concept, Map<String, TermConcept> theId2concept, TermCodeSystemVersion theCodeSystemVersion) {
		myCode2concept = theCode2concept;
		myId2concept = theId2concept;
		myCodeSystemVersion = theCodeSystemVersion;
		myValidConceptIds = theValidConceptIds;
	}

	@Override
	public void accept(CSVRecord theRecord) {
		String descriptionId = theRecord.get("id");
		// Skip rows flagged inactive
		if (!"1".equals(theRecord.get("active"))) {
			return;
		}
		// Skip descriptions whose concept was not accepted in the concept-file pass
		String conceptId = theRecord.get("conceptId");
		if (!myValidConceptIds.contains(conceptId)) {
			return;
		}
		TermConcept concept = getOrCreateConcept(myCodeSystemVersion, myId2concept, descriptionId);
		concept.setCode(conceptId);
		concept.setDisplay(theRecord.get("term"));
		myCode2concept.put(conceptId, concept);
	}
}
/**
 * Handles rows of the SNOMED CT RF2 Concept file: collects the IDs of all active
 * concepts so that later passes (descriptions, relationships) can filter on them.
 */
private final class SctHandlerConcept implements IRecordHandler {
	private Set<String> myValidConceptIds;

	public SctHandlerConcept(Set<String> theValidConceptIds) {
		myValidConceptIds = theValidConceptIds;
	}

	@Override
	public void accept(CSVRecord theRecord) {
		String conceptId = theRecord.get("id");
		boolean isActive = "1".equals(theRecord.get("active"));
		if (isActive) {
			myValidConceptIds.add(conceptId);
		}
	}
}
private static class ZippedFileInputStream extends InputStream {
private ZipInputStream is;
@ -241,26 +352,65 @@ public class TerminologyLoaderSvc {
}
public static void main(String[] args) throws Exception {
byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));
TerminologyLoaderSvc svc = new TerminologyLoaderSvc();
svc.loadSnomedCt(bytes);
// byte[] bytes = IOUtils.toByteArray(new FileInputStream("/Users/james/Downloads/SnomedCT_Release_INT_20160131_Full.zip"));
// svc.loadSnomedCt(bytes);
Map<String, File> files = new HashMap<String, File>();
files.put(SCT_FILE_CONCEPT, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Concept_Full_INT_20160131.txt"));
files.put(SCT_FILE_DESCRIPTION, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Description_Full-en_INT_20160131.txt"));
files.put(SCT_FILE_RELATIONSHIP, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Relationship_Full_INT_20160131.txt"));
svc.processSnomedCtFiles(files, null);
}
/**
 * OutputStream decorator that delegates all writes to a wrapped FileOutputStream
 * while counting bytes written, logging progress every LOG_INCREMENT (10 MB).
 * Used while extracting large terminology files from an uploaded ZIP.
 */
private static class SinkOutputStream extends OutputStream {
	private static final long LOG_INCREMENT = 10 * FileUtils.ONE_MB;

	private final FileOutputStream myWrap;
	// long, not int: SNOMED CT release files can be large and an int byte counter
	// would overflow past 2 GB, stopping the progress logging
	private long myBytes;
	private long myNextLogCount = LOG_INCREMENT;
	private final String myFilename;

	/**
	 * @param theWrap the stream actually receiving the bytes
	 * @param theFilename name used only in progress log messages
	 */
	public SinkOutputStream(FileOutputStream theWrap, String theFilename) {
		myWrap = theWrap;
		myFilename = theFilename;
	}

	@Override
	public void write(int theB) throws IOException {
		myWrap.write(theB);
		addCount(1);
	}

	// Accumulates the running byte count and emits a progress line every LOG_INCREMENT bytes
	private void addCount(int theCount) {
		myBytes += theCount;
		if (myBytes > myNextLogCount) {
			ourLog.info(" * Wrote {} of {}", FileUtils.byteCountToDisplaySize(myBytes), myFilename);
			myNextLogCount = myBytes + LOG_INCREMENT;
		}
	}

	@Override
	public void write(byte[] theB) throws IOException {
		myWrap.write(theB);
		addCount(theB.length);
	}

	@Override
	public void write(byte[] theB, int theOff, int theLen) throws IOException {
		myWrap.write(theB, theOff, theLen);
		addCount(theLen);
	}

	@Override
	public void flush() throws IOException {
		myWrap.flush();
	}

	@Override
	public void close() throws IOException {
		myWrap.close();
	}
}

View File

@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.term;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.util.TestUtil;
public class TerminologyLoaderSvcIntegrationTest extends BaseJpaDstu3Test {
// Loader under test; constructed by hand (not Spring-wired) in beforeInitTest()
private TerminologyLoaderSvc myLoader;
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@Before
public void beforeInitTest() {
myLoader = new TerminologyLoaderSvc();
myLoader.setTermSvcForUnitTests(myTermSvc);
}
// Ignored: depends on a SNOMED CT release unpacked at a developer-specific local path
// TODO(review): parameterize the SNOMED CT directory (system property/env var) so this can run elsewhere
@Test
@Ignore
public void testLoadAndStoreSnomedCt() {
Map<String, File> files = new HashMap<String, File>();
files.put(TerminologyLoaderSvc.SCT_FILE_CONCEPT, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Concept_Full_INT_20160131.txt"));
files.put(TerminologyLoaderSvc.SCT_FILE_DESCRIPTION, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Description_Full-en_INT_20160131.txt"));
files.put(TerminologyLoaderSvc.SCT_FILE_RELATIONSHIP, new File("/Users/james/tmp/sct/SnomedCT_Release_INT_20160131_Full/Terminology/sct2_Relationship_Full_INT_20160131.txt"));
myLoader.processSnomedCtFiles(files, mySrd);
}
}

View File

@ -14,6 +14,7 @@ import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.util.TestUtil;
public class TerminologyLoaderSvcTest {
@ -35,7 +36,6 @@ public class TerminologyLoaderSvcTest {
}
@Test
// @Ignore
public void testLoadSnomedCt() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
@ -50,7 +50,8 @@ public class TerminologyLoaderSvcTest {
ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
mySvc.loadSnomedCt(bos.toByteArray());
RequestDetails details = mock(RequestDetails.class);
mySvc.loadSnomedCt(bos.toByteArray(), details);
}
private void addEntry(ZipOutputStream zos, String fileName) throws IOException {

View File

@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -53,9 +54,9 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
TermConcept child = new TermConcept();
child.setCodeSystem(cs);
child.setCode("child");
parent.addChild(child);
parent.addChild(child, RelationshipTypeEnum.ISA);
child.addChild(parent);
child.addChild(parent, RelationshipTypeEnum.ISA);
try {
myTermSvc.storeNewCodeSystemVersion(table.getId(), "http://foo", cs);
@ -82,16 +83,16 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
cs.getConcepts().add(parentA);
TermConcept childAA = new TermConcept(cs, "childAA");
parentA.addChild(childAA);
parentA.addChild(childAA, RelationshipTypeEnum.ISA);
TermConcept childAAA = new TermConcept(cs, "childAAA");
childAA.addChild(childAAA);
childAA.addChild(childAAA, RelationshipTypeEnum.ISA);
TermConcept childAAB = new TermConcept(cs, "childAAB");
childAA.addChild(childAAB);
childAA.addChild(childAAB, RelationshipTypeEnum.ISA);
TermConcept childAB = new TermConcept(cs, "childAB");
parentA.addChild(childAB);
parentA.addChild(childAB, RelationshipTypeEnum.ISA);
TermConcept parentB = new TermConcept(cs, "ParentB");
cs.getConcepts().add(parentB);
@ -133,16 +134,16 @@ public class TerminologySvcImplTest extends BaseJpaDstu3Test {
cs.getConcepts().add(parentA);
TermConcept childAA = new TermConcept(cs, "childAA");
parentA.addChild(childAA);
parentA.addChild(childAA, RelationshipTypeEnum.ISA);
TermConcept childAAA = new TermConcept(cs, "childAAA");
childAA.addChild(childAAA);
childAA.addChild(childAAA, RelationshipTypeEnum.ISA);
TermConcept childAAB = new TermConcept(cs, "childAAB");
childAA.addChild(childAAB);
childAA.addChild(childAAB, RelationshipTypeEnum.ISA);
TermConcept childAB = new TermConcept(cs, "childAB");
parentA.addChild(childAB);
parentA.addChild(childAB, RelationshipTypeEnum.ISA);
TermConcept parentB = new TermConcept(cs, "ParentB");
cs.getConcepts().add(parentB);

View File

@ -8,3 +8,8 @@ id effectiveTime active moduleId definitionStatusId
105000 20040731 0 900000000000207008 900000000000074008
106004 20020131 1 900000000000207008 900000000000074008
107008 20020131 1 900000000000207008 900000000000074008
116680003 20020131 1 900000000000207008 900000000000074008
126815003 20020131 1 900000000000207008 900000000000074008
126813005 20020131 1 900000000000207008 900000000000074008
126813006 20020131 1 900000000000207008 900000000000074008
126817006 20020131 1 900000000000207008 900000000000074008

View File

@ -8,4 +8,4 @@ id effectiveTime active moduleId conceptId languageCode typeId term caseSignific
107012 20020131 1 900000000000207008 126819009 en 900000000000013009 Neoplasm of thoracic esophagus 900000000000020002
108019 20020131 1 900000000000207008 126820003 en 900000000000013009 Neoplasm of abdominal esophagus 900000000000020002
110017 20020131 1 900000000000207008 126822006 en 900000000000013009 Neoplasm of middle third of esophagus 900000000000020002
181114011 20020131 1 900000000000207008 116680003 en 900000000000013009 Is a 900000000000020002
181114011 20020131 1 900000000000207008 116680003 en 900000000000013009 Is a (attribute) 900000000000020002

View File

@ -2,3 +2,4 @@ id effectiveTime active moduleId sourceId destinationId relationshipGroup typeId
100022 20020131 1 900000000000207008 126815003 126813005 0 116680003 900000000000011006 900000000000451002
100022 20090731 0 900000000000207008 126816002 126813005 0 116680003 900000000000011006 900000000000451002
101021 20020131 1 900000000000207008 126817006 126815003 0 116680003 900000000000011006 900000000000451002
101021 20020131 1 900000000000207008 126815003 126817006 0 116680003 900000000000011006 900000000000451002

View File

@ -60,6 +60,7 @@ import org.hl7.fhir.dstu3.model.OperationDefinition.OperationParameterUse;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.parser.DataFormatException;
@ -188,7 +189,7 @@ public class ServerConformanceProvider implements IServerConformanceProvider<Con
retVal.setPublisher(myPublisher);
retVal.setDateElement(conformanceDate());
retVal.setFhirVersion("1.4.0"); // TODO: pull from model
retVal.setFhirVersion(FhirVersionEnum.DSTU3.getFhirVersionString());
retVal.setAcceptUnknown(UnknownContentCode.EXTENSIONS); // TODO: make this configurable - this is a fairly big
// effort since the parser
// needs to be modified to actually allow it

View File

@ -39,6 +39,7 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.parser.CustomTypeDstu3Test;
import ca.uhn.fhir.parser.CustomTypeDstu3Test.MyCustomPatient;
import ca.uhn.fhir.parser.IParser;
@ -65,7 +66,7 @@ public class GenericClientDstu3Test {
}
private String expectedUserAgent() {
return "HAPI-FHIR/" + VersionUtil.getVersion() + " (FHIR Client; FHIR DSTU3; apache)";
return "HAPI-FHIR/" + VersionUtil.getVersion() + " (FHIR Client; FHIR " + FhirVersionEnum.DSTU3.getFhirVersionString() + "/DSTU3; apache)";
}
private byte[] extractBodyAsByteArray(ArgumentCaptor<HttpUriRequest> capt) throws IOException {

View File

@ -23,11 +23,13 @@ import org.junit.BeforeClass;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.rest.annotation.ResourceParam;
import ca.uhn.fhir.rest.annotation.Validate;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.util.PortUtil;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.VersionUtil;
public class MetadataDstu3Test2 {
@ -48,6 +50,8 @@ public class MetadataDstu3Test2 {
assertEquals(200, status.getStatusLine().getStatusCode());
assertThat(output, containsString("<Conformance"));
assertEquals("HAPI FHIR " + VersionUtil.getVersion() + " REST Server (FHIR Server; FHIR " + FhirVersionEnum.DSTU3.getFhirVersionString() + "/DSTU3)", status.getFirstHeader("X-Powered-By").getValue());
httpPost = new HttpPost("http://localhost:" + ourPort + "/metadata");
status = ourClient.execute(httpPost);
output = IOUtils.toString(status.getEntity().getContent());