From 871914bd3b2a67b8c71891c2cc0f0107eb447589 Mon Sep 17 00:00:00 2001 From: Hanan Awwad Date: Mon, 3 May 2021 15:02:20 +0300 Subject: [PATCH] Concept properties are not being saved while using the operation apply-codesystem-delta-add (#2577) * fix issue #2280 * revert the indentation back. * revert-back untouched changes. * saving concept properties using the operation apply-codesystem-delta-add * fix test cases; add check for null properties * fix test; expected file descriptors --- .../cli/UploadTerminologyCommandTest.java | 2 +- .../provider/TerminologyUploaderProvider.java | 42 ++++++- .../term/TermCodeSystemStorageSvcImpl.java | 5 + .../uhn/fhir/jpa/term/TermLoaderSvcImpl.java | 16 ++- .../jpa/term/custom/CustomTerminologySet.java | 20 ++- .../fhir/jpa/term/custom/PropertyHandler.java | 53 ++++++++ .../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 11 +- .../r4/TerminologyUploaderProviderR4Test.java | 118 ++++++++++++++++++ .../jpa/term/TerminologySvcDeltaR4Test.java | 10 +- .../test/resources/custom_term/properties.csv | 9 ++ 10 files changed, 267 insertions(+), 19 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java create mode 100644 hapi-fhir-jpaserver-base/src/test/resources/custom_term/properties.csv diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java index 8b1eccd4d5a..0e7288f36ea 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java @@ -116,7 +116,7 @@ public class UploadTerminologyCommandTest extends BaseTest { verify(myTermLoaderSvc, times(1)).loadDeltaAdd(eq("http://foo"), myDescriptorListCaptor.capture(), any()); List listOfDescriptors = myDescriptorListCaptor.getValue(); - assertEquals(1, listOfDescriptors.size()); + assertEquals(2, listOfDescriptors.size()); assertEquals("concepts.csv", listOfDescriptors.get(0).getFilename()); String uploadFile = IOUtils.toString(listOfDescriptors.get(0).getInputStream(), Charsets.UTF_8); assertThat(uploadFile, uploadFile, containsString("\"CODE\",\"Display\"")); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java index bacae19c497..f2e3a0ae1b3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java @@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.term.UploadStatistics; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.custom.ConceptHandler; import ca.uhn.fhir.jpa.term.custom.HierarchyHandler; +import ca.uhn.fhir.jpa.term.custom.PropertyHandler; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -214,12 +215,14 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { private void convertCodeSystemsToFileDescriptors(List theFiles, List theCodeSystems) { Map codes = new LinkedHashMap<>(); + Map> codeToProperties = new LinkedHashMap<>(); + Multimap codeToParentCodes = ArrayListMultimap.create(); if (theCodeSystems != null) { for 
(IBaseResource nextCodeSystemUncast : theCodeSystems) { CodeSystem nextCodeSystem = canonicalizeCodeSystem(nextCodeSystemUncast); - convertCodeSystemCodesToCsv(nextCodeSystem.getConcept(), codes, null, codeToParentCodes); + convertCodeSystemCodesToCsv(nextCodeSystem.getConcept(), codes, codeToProperties, null, codeToParentCodes); } } @@ -260,6 +263,36 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); theFiles.add(fileDescriptor); } + // Create codeToProperties file + if (codeToProperties.size() > 0) { + StringBuilder b = new StringBuilder(); + b.append(PropertyHandler.CODE); + b.append(","); + b.append(PropertyHandler.KEY); + b.append(","); + b.append(PropertyHandler.VALUE); + b.append(","); + b.append(PropertyHandler.TYPE); + b.append("\n"); + + for (Map.Entry> nextEntry : codeToProperties.entrySet()) { + for (CodeSystem.ConceptPropertyComponent propertyComponent : nextEntry.getValue()) { + b.append(csvEscape(nextEntry.getKey())); + b.append(","); + b.append(csvEscape(propertyComponent.getCode())); + b.append(","); + //TODO: check this for different types, other types should be added once TermConceptPropertyTypeEnum contain different types + b.append(csvEscape(propertyComponent.getValueStringType().getValue())); + b.append(","); + b.append(csvEscape(propertyComponent.getValue().primitiveValue())); + b.append("\n"); + } + } + byte[] bytes = b.toString().getBytes(Charsets.UTF_8); + String fileName = TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE; + ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); + theFiles.add(fileDescriptor); + } } @@ -283,14 +316,17 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { return nextCodeSystem; } - private void convertCodeSystemCodesToCsv(List theConcept, Map theCodes, String theParentCode, Multimap theCodeToParentCodes) { + private void convertCodeSystemCodesToCsv(List theConcept, Map theCodes, Map> theProperties, String theParentCode, Multimap theCodeToParentCodes) { for (CodeSystem.ConceptDefinitionComponent nextConcept : theConcept) { if (isNotBlank(nextConcept.getCode())) { theCodes.put(nextConcept.getCode(), nextConcept.getDisplay()); if (isNotBlank(theParentCode)) { theCodeToParentCodes.put(nextConcept.getCode(), theParentCode); } - convertCodeSystemCodesToCsv(nextConcept.getConcept(), theCodes, nextConcept.getCode(), theCodeToParentCodes); + if (nextConcept.getProperty() != null) { + theProperties.put(nextConcept.getCode(), nextConcept.getProperty()); + } + convertCodeSystemCodesToCsv(nextConcept.getConcept(), theCodes, theProperties, nextConcept.getCode(), theCodeToParentCodes); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index ca03adf93bb..a17bb266708 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -580,6 +580,11 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { conceptToAdd.setParentPids(null); conceptToAdd.setCodeSystemVersion(theCsv); + if (conceptToAdd.getProperties() !=null) + conceptToAdd.getProperties().forEach(termConceptProperty -> { + 
termConceptProperty.setConcept(theConceptToAdd); + termConceptProperty.setCodeSystemVersion(theCsv); + }); if (theStatisticsTracker.getUpdatedConceptCount() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) { saveConcept(conceptToAdd); Long nextConceptPid = conceptToAdd.getId(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java index 788d1c10a87..d9864f8ed8e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; +import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; @@ -140,6 +141,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class TermLoaderSvcImpl implements ITermLoaderSvc { public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv"; public static final String CUSTOM_HIERARCHY_FILE = "hierarchy.csv"; + public static final String CUSTOM_PROPERTIES_FILE = "properties.csv"; static final String IMGTHLA_HLA_NOM_TXT = "hla_nom.txt"; static final String IMGTHLA_HLA_XML = "hla.xml"; static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json"; @@ -225,14 +227,14 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()) - ); + ); descriptors.verifyOptionalFilesExist(optionalFilenameFragments); ourLog.info("Beginning LOINC processing"); String codeSystemVersionId = uploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); - if (codeSystemVersionId != null ) { + if (codeSystemVersionId != null) { // Load the code system with version and then remove the version property. 
processLoincFiles(descriptors, theRequestDetails, uploadProperties, false); uploadProperties.remove(LOINC_CODESYSTEM_VERSION.getCode()); @@ -483,7 +485,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { try { String loincCsString = IOUtils.toString(BaseTermReadSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8); loincCs = FhirContext.forR4().newXmlParser().parseResource(CodeSystem.class, loincCsString); - String codeSystemVersionId = theUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); + String codeSystemVersionId = theUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); if (codeSystemVersionId != null) { loincCs.setVersion(codeSystemVersionId); loincCs.setId(loincCs.getId() + "-" + codeSystemVersionId); @@ -789,4 +791,12 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } return concept; } + + public static TermConceptProperty getOrCreateConceptProperty(Map> code2Properties, String code, String key) { + List termConceptProperties = code2Properties.get(code); + if (termConceptProperties == null) + return new TermConceptProperty(); + Optional termConceptProperty = termConceptProperties.stream().filter(property -> key.equals(property.getKey())).findFirst(); + return termConceptProperty.isPresent() ? termConceptProperty.get() : new TermConceptProperty(); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java index 3b53735b476..57002b6d521 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java @@ -22,18 +22,16 @@ package ca.uhn.fhir.jpa.term.custom; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.term.IRecordHandler; import ca.uhn.fhir.jpa.term.LoadedFileDescriptors; import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; import org.apache.commons.csv.QuoteMode; import org.apache.commons.lang3.Validate; import javax.annotation.Nonnull; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -134,10 +132,24 @@ public class CustomTerminologySet { public static CustomTerminologySet load(LoadedFileDescriptors theDescriptors, boolean theFlat) { final Map code2concept = new LinkedHashMap<>(); - // Concepts IRecordHandler conceptHandler = new ConceptHandler(code2concept); + TermLoaderSvcImpl.iterateOverZipFile(theDescriptors, TermLoaderSvcImpl.CUSTOM_CONCEPTS_FILE, conceptHandler, ',', QuoteMode.NON_NUMERIC, false); + + if (theDescriptors.hasFile(TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE)) { + Map> theCode2property = new LinkedHashMap<>(); + IRecordHandler propertyHandler = new PropertyHandler(theCode2property); + TermLoaderSvcImpl.iterateOverZipFile(theDescriptors, TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE, propertyHandler, ',', QuoteMode.NON_NUMERIC, false); + for (TermConcept termConcept : code2concept.values()) { + if (!theCode2property.isEmpty() && theCode2property.get(termConcept.getCode()) != null) { + 
theCode2property.get(termConcept.getCode()).forEach(property -> { + termConcept.getProperties().add(property); + }); + } + } + } + if (theFlat) { return new CustomTerminologySet(code2concept.size(), new ArrayList<>(code2concept.values())); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java new file mode 100644 index 00000000000..cabdb8deb1b --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.term.custom; + +import ca.uhn.fhir.jpa.entity.TermConceptProperty; +import ca.uhn.fhir.jpa.entity.TermConceptPropertyTypeEnum; +import ca.uhn.fhir.jpa.term.IRecordHandler; +import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl; +import ca.uhn.fhir.util.ValidateUtil; +import org.apache.commons.csv.CSVRecord; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.trim; + +public class PropertyHandler implements IRecordHandler { + + public static final String CODE = "CODE"; + public static final String KEY = "KEY"; + public static final String VALUE = "VALUE"; + public static final String TYPE = "TYPE"; + private final Map> myCode2Properties; + + public PropertyHandler(Map> theCode2concept) { + myCode2Properties = theCode2concept; + } + + @Override + public void accept(CSVRecord theRecord) { + String code = trim(theRecord.get(CODE)); + String key = trim(theRecord.get(KEY)); + + if (isNotBlank(code) && isNotBlank(KEY)) { + String value = trim(theRecord.get(VALUE)); + String type = trim(theRecord.get(TYPE)); + + List conceptProperties = myCode2Properties.get(code); + if (conceptProperties == null) + conceptProperties = new ArrayList<>(); + + TermConceptProperty conceptProperty = TermLoaderSvcImpl.getOrCreateConceptProperty(myCode2Properties, code, key); + ValidateUtil.isNotNullOrThrowUnprocessableEntity(conceptProperty, "Concept property %s not found in file", conceptProperty); + + conceptProperty.setKey(key); + conceptProperty.setValue(value); + //TODO: check this for different types, other types should be added once TermConceptPropertyTypeEnum contain different types + conceptProperty.setType(TermConceptPropertyTypeEnum.STRING); + conceptProperties.add(conceptProperty); + myCode2Properties.put(code, conceptProperties); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index 9cb19833adf..5d2337f14e5 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -48,6 +48,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; +import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; @@ -282,6 +283,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil @Autowired protected 
ITermConceptDesignationDao myTermConceptDesignationDao; @Autowired + protected ITermConceptPropertyDao myTermConceptPropertyDao; + @Autowired @Qualifier("myConditionDaoR4") protected IFhirResourceDao myConditionDao; @Autowired @@ -541,7 +544,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - SearchSession searchSession = Search.session(myEntityManager); + SearchSession searchSession = Search.session(myEntityManager); searchSession.workspace(ResourceTable.class).purge(); searchSession.indexingPlan().execute(); }); @@ -644,6 +647,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil List children = nextCode.getChildCodes(); flattenExpansionHierarchy(theFlattenedHierarchy, children, thePrefix + " "); + } } @@ -795,10 +799,10 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil if (theSystem != null) { stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); } - if (theCode != null ) { + if (theCode != null) { stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); } - if (theDisplay != null){ + if (theDisplay != null) { stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); } if (theDesignationCount != null) { @@ -815,6 +819,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil return first.get(); } } + public List getExpandedConceptsByValueSetUrl(String theValuesetUrl) { return runInTransaction(() -> { List valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java index f1aa18de3ff..c6c3c85410a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.provider.r4; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; @@ -12,9 +13,11 @@ import com.google.common.base.Charsets; import org.apache.commons.io.IOUtils; import org.hl7.fhir.r4.model.Attachment; import org.hl7.fhir.r4.model.CodeSystem; +import org.hl7.fhir.r4.model.CodeType; import org.hl7.fhir.r4.model.IntegerType; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Reference; +import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.UriType; import org.junit.jupiter.api.Test; @@ -24,6 +27,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Optional; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; @@ -236,6 +240,62 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes )); } + @Test + public void testApplyDeltaAdd_UsingCsv_withPropertiesCsv() throws IOException { + String conceptsCsv = loadResource("/custom_term/concepts.csv"); + Attachment 
conceptsAttachment = new Attachment() + .setData(conceptsCsv.getBytes(Charsets.UTF_8)) + .setContentType("text/csv") + .setUrl("file:/foo/concepts.csv"); + String hierarchyCsv = loadResource("/custom_term/hierarchy.csv"); + Attachment hierarchyAttachment = new Attachment() + .setData(hierarchyCsv.getBytes(Charsets.UTF_8)) + .setContentType("text/csv") + .setUrl("file:/foo/hierarchy.csv"); + String propertiesCsv = loadResource("/custom_term/properties.csv"); + Attachment propertiesAttachment = new Attachment() + .setData(propertiesCsv.getBytes(Charsets.UTF_8)) + .setContentType("text/csv") + .setUrl("file:/foo/properties.csv"); + LoggingInterceptor interceptor = new LoggingInterceptor(true); + myClient.registerInterceptor(interceptor); + Parameters outcome = myClient + .operation() + .onType(CodeSystem.class) + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + .withParameter(Parameters.class, TerminologyUploaderProvider.PARAM_SYSTEM, new UriType("http://foo/cs")) + .andParameter(TerminologyUploaderProvider.PARAM_FILE, conceptsAttachment) + .andParameter(TerminologyUploaderProvider.PARAM_FILE, hierarchyAttachment) + .andParameter(TerminologyUploaderProvider.PARAM_FILE, propertiesAttachment) + .prettyPrint() + .execute(); + myClient.unregisterInterceptor(interceptor); + + String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); + ourLog.info(encoded); + assertThat(encoded, stringContainsInOrder( + "\"name\": \"conceptCount\"", + "\"valueInteger\": 5", + "\"name\": \"target\"", + "\"reference\": \"CodeSystem/" + )); + runInTransaction(() -> { + TermCodeSystem cs = myTermCodeSystemDao.findByCodeSystemUri("http://foo/cs"); + TermCodeSystemVersion version = cs.getCurrentVersion(); + TermConcept microCode = myTermConceptDao.findByCodeSystemAndCode(version, "NEUT").get(); + assertEquals(2, microCode.getProperties().size()); + TermConcept code = myTermConceptDao.findByCodeSystemAndCode(version, "HB").get(); + assertEquals(1, code.getProperties().size()); + Integer codeProperties = myTermConceptPropertyDao.countByCodeSystemVersion(version.getPid()); + assertEquals(6, codeProperties); + Optional first = code.getProperties().stream().filter(property -> "color".equalsIgnoreCase(property.getKey()) && "red".equalsIgnoreCase(property.getValue())).findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Concept %s did not contain property with key %s and value %s ", code.getCode(), "property1", "property1Value"); + fail(failureMessage); + } + }); + } + @Test public void testApplyDeltaAdd_UsingCodeSystem() { CodeSystem codeSystem = new CodeSystem(); @@ -276,6 +336,64 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes ); } + @Test + public void testApplyDeltaAdd_UsingCodeSystemWithConceptProprieties() { + CodeSystem codeSystem = new CodeSystem(); + codeSystem.setUrl("http://foo/cs"); + CodeSystem.ConceptDefinitionComponent chem = codeSystem.addConcept().setCode("CHEM").setDisplay("Chemistry").addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("color"), new StringType("green"))); + chem.addConcept().setCode("HB").setDisplay("Hemoglobin").addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("color"), new StringType("red"))); + chem.addConcept().setCode("NEUT").setDisplay("Neutrophils").addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("color"), new StringType("pink"))).addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("shape"), new 
StringType("spherical"))); + CodeSystem.ConceptDefinitionComponent micro = codeSystem.addConcept().setCode("MICRO").setDisplay("Microbiology").addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("color"), new StringType("yellow"))); + micro.addConcept().setCode("C&S").setDisplay("Culture And Sensitivity").addProperty(new CodeSystem.ConceptPropertyComponent(new CodeType("color"), new StringType("bellow"))); + + LoggingInterceptor interceptor = new LoggingInterceptor(true); + myClient.registerInterceptor(interceptor); + Parameters outcome = myClient + .operation() + .onType(CodeSystem.class) + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + .withParameter(Parameters.class, TerminologyUploaderProvider.PARAM_SYSTEM, new UriType("http://foo/cs")) + .andParameter(TerminologyUploaderProvider.PARAM_CODESYSTEM, codeSystem) + .prettyPrint() + .execute(); + myClient.unregisterInterceptor(interceptor); + + String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); + ourLog.info(encoded); + assertThat(encoded, stringContainsInOrder( + "\"name\": \"conceptCount\"", + "\"valueInteger\": 5", + "\"name\": \"target\"", + "\"reference\": \"CodeSystem/" + )); + + assertHierarchyContains( + "CHEM seq=0", + " HB seq=0", + " NEUT seq=1", + "MICRO seq=0", + " C&S seq=0" + ); + + runInTransaction(() -> { + TermCodeSystem cs = myTermCodeSystemDao.findByCodeSystemUri("http://foo/cs"); + TermCodeSystemVersion version = cs.getCurrentVersion(); + TermConcept microCode = myTermConceptDao.findByCodeSystemAndCode(version, "NEUT").get(); + assertEquals(2, microCode.getProperties().size()); + TermConcept code = myTermConceptDao.findByCodeSystemAndCode(version, "HB").get(); + assertEquals(1, code.getProperties().size()); + Integer codeProperties = myTermConceptPropertyDao.countByCodeSystemVersion(version.getPid()); + assertEquals(6, codeProperties); + Optional first = code.getProperties().stream().filter(property -> "color".equalsIgnoreCase(property.getKey()) && "red".equalsIgnoreCase(property.getValue())).findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Concept %s did not contain property with key %s and value %s ", code.getCode(), "property1", "property1Value"); + fail(failureMessage); + } + }); + + } + + @Test public void testApplyDeltaAdd_UsingCodeSystemWithComma() throws IOException { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java index a03bed19c8b..b7930b966c6 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java @@ -558,21 +558,21 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test { assertEquals(true, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent())); // Remove CodeA - runInTransaction(()->{ - ourLog.info("About to remove CodeA. Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + runInTransaction(() -> { + ourLog.info("About to remove CodeA. 
Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); }); myCaptureQueriesListener.clear(); - runInTransaction(()->{ + runInTransaction(() -> { CustomTerminologySet delta2 = new CustomTerminologySet(); delta2.addRootConcept("codeA"); myTermCodeSystemStorageSvc.applyDeltaCodeSystemsRemove("http://foo/cs", delta2); }); myCaptureQueriesListener.logAllQueries(); - runInTransaction(()->{ - ourLog.info("Done removing. Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + runInTransaction(() -> { + ourLog.info("Done removing. Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); }); assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeB").isPresent())); diff --git a/hapi-fhir-jpaserver-base/src/test/resources/custom_term/properties.csv b/hapi-fhir-jpaserver-base/src/test/resources/custom_term/properties.csv new file mode 100644 index 00000000000..a7a39266e91 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/resources/custom_term/properties.csv @@ -0,0 +1,9 @@ +CODE,KEY, VALUE, TYPE + +CHEM,color,green,String +HB,color,red,String +NEUT,color,pink,String +NEUT,shape,spherical,String + +MICRO,color,yellow,String +C&S,color,blue,String
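
Usage sketch (mirrors the new testApplyDeltaAdd_UsingCsv_withPropertiesCsv added above; the client variable, CSV contents, file URLs and the "http://foo/cs" system URL are placeholders): with this change, concept properties can be supplied as an extra properties.csv attachment with the columns CODE,KEY,VALUE,TYPE, passed alongside concepts.csv and hierarchy.csv when invoking $apply-codesystem-delta-add:

    // Assumed to exist in the caller: an IGenericClient ("client") pointed at the target server,
    // and the CSV payloads as Strings (conceptsCsv, hierarchyCsv, propertiesCsv).
    Attachment conceptsAttachment = new Attachment()
        .setData(conceptsCsv.getBytes(Charsets.UTF_8))
        .setContentType("text/csv")
        .setUrl("file:/foo/concepts.csv");
    Attachment hierarchyAttachment = new Attachment()
        .setData(hierarchyCsv.getBytes(Charsets.UTF_8))
        .setContentType("text/csv")
        .setUrl("file:/foo/hierarchy.csv");
    // New in this change: a properties.csv attachment with columns CODE,KEY,VALUE,TYPE
    Attachment propertiesAttachment = new Attachment()
        .setData(propertiesCsv.getBytes(Charsets.UTF_8))
        .setContentType("text/csv")
        .setUrl("file:/foo/properties.csv");

    // Invoke $apply-codesystem-delta-add with all three files; the loader now persists the
    // properties from properties.csv onto the corresponding TermConcept entities.
    Parameters outcome = client
        .operation()
        .onType(CodeSystem.class)
        .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
        .withParameter(Parameters.class, TerminologyUploaderProvider.PARAM_SYSTEM, new UriType("http://foo/cs"))
        .andParameter(TerminologyUploaderProvider.PARAM_FILE, conceptsAttachment)
        .andParameter(TerminologyUploaderProvider.PARAM_FILE, hierarchyAttachment)
        .andParameter(TerminologyUploaderProvider.PARAM_FILE, propertiesAttachment)
        .execute();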