From 6cf5989d6c494f928dc31adfdcefdccec803549b Mon Sep 17 00:00:00 2001 From: jmarchionatto <60409882+jmarchionatto@users.noreply.github.com> Date: Thu, 3 Nov 2022 08:48:02 -0400 Subject: [PATCH] Issue 4106 process map to properties when uploading loinc terminology (#4231) * Add loinc upload handler to process MAP_TO properties. * Move test to test folder * Fix mapto file location property name * Allow override as full loinc upload tests take (much) longer than defined timeout * Move handler later to make sure all concepts were added to map * Update test to check all project enhancements * Restore project unrelated changes * Rename base test class properly and move to right package * Condition MapTo.csv file processing to file presence for backwards as file is optional * Document and annotate test-only usage for disabling deferred tasks timeout Co-authored-by: juan.marchionatto --- ...ties-when-uploading-loinc-terminology.yaml | 4 + .../fhir/jpa/term/LoadedFileDescriptors.java | 4 + .../jpa/term/TermDeferredStorageSvcImpl.java | 18 +- .../uhn/fhir/jpa/term/TermLoaderSvcImpl.java | 25 +- .../jpa/term/api/ITermDeferredStorageSvc.java | 5 + .../jpa/term/loinc/LoincMapToHandler.java | 79 +++++ .../term/loinc/LoincUploadPropertiesEnum.java | 5 +- .../ValueSetHSearchExpansionR4ElasticIT.java | 3 +- .../ValueSetHSearchExpansionR4LuceneIT.java | 3 +- .../BaseValueSetHSearchExpansionR4Test.java} | 12 +- .../jpa/term/LoincFullLoadR4SandboxIT.java | 305 ++++++++++++------ .../src/test/resources/loinc-full/readme.txt | 8 +- 12 files changed, 340 insertions(+), 131 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_2_0/4106-process-map-to-properties-when-uploading-loinc-terminology.yaml create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java rename hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/{term/AbstractValueSetHSearchExpansionR4Test.java => 
test/BaseValueSetHSearchExpansionR4Test.java} (99%) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_2_0/4106-process-map-to-properties-when-uploading-loinc-terminology.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_2_0/4106-process-map-to-properties-when-uploading-loinc-terminology.yaml new file mode 100644 index 00000000000..e976756bbd0 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_2_0/4106-process-map-to-properties-when-uploading-loinc-terminology.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 4106 +title: "LOINC terminology upload process was enhanced by loading `MAP_TO` properties defined in MapTo.csv input file to TermConcept(s)." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java index ced0235afa3..015282cd55f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java @@ -143,6 +143,10 @@ public class LoadedFileDescriptors implements Closeable { } } + boolean isOptionalFilesExist(List theFileList) { + return notFound(theFileList).isEmpty(); + } + void verifyPartLinkFilesExist(List theMultiPartLinkFiles, String theSinglePartLinkFile) { List notFoundMulti = notFound(theMultiPartLinkFiles); List notFoundSingle = notFound(Arrays.asList(theSinglePartLinkFile)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java index 5f4fbdf770d..54d48deb6bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java @@ -78,6 +78,7 @@ public 
class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class); private static final long SAVE_ALL_DEFERRED_WARN_MINUTES = 1; private static final long SAVE_ALL_DEFERRED_ERROR_MINUTES = 5; + private boolean myAllowDeferredTasksTimeout = true; private final List myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>()); private final Queue myDeferredCodeSystemVersionsDeletions = new ConcurrentLinkedQueue<>(); private final List myDeferredConcepts = Collections.synchronizedList(new ArrayList<>()); @@ -274,13 +275,18 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { @Override public void saveAllDeferred() { - TimeoutManager timeoutManager = new TimeoutManager(TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()", - Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES), - Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES)); + TimeoutManager timeoutManager = null; + if (myAllowDeferredTasksTimeout) { + timeoutManager = new TimeoutManager(TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()", + Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES), + Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES)); + } while (!isStorageQueueEmpty()) { - if (timeoutManager.checkTimeout()) { - ourLog.info(toString()); + if (myAllowDeferredTasksTimeout) { + if (timeoutManager.checkTimeout()) { + ourLog.info(toString()); + } } saveDeferred(); } @@ -486,6 +492,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { void setCodeSystemVersionDaoForUnitTest(ITermCodeSystemVersionDao theCodeSystemVersionDao) { myCodeSystemVersionDao = theCodeSystemVersionDao; } + @Override + public void disallowDeferredTaskTimeout() { myAllowDeferredTasksTimeout = false; } @Override @VisibleForTesting diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java index c6ea77a2730..0da5febc40e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.term.loinc.LoincIeeeMedicalDeviceCodeHandler; import ca.uhn.fhir.jpa.term.loinc.LoincImagingDocumentCodeHandler; import ca.uhn.fhir.jpa.term.loinc.LoincLinguisticVariantHandler; import ca.uhn.fhir.jpa.term.loinc.LoincLinguisticVariantsHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincMapToHandler; import ca.uhn.fhir.jpa.term.loinc.LoincParentGroupFileHandler; import ca.uhn.fhir.jpa.term.loinc.LoincPartHandler; import ca.uhn.fhir.jpa.term.loinc.LoincPartLinkHandler; @@ -112,6 +113,8 @@ import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUIS import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_PATH; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_MAPTO_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_MAPTO_FILE_DEFAULT; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE; @@ -260,6 +263,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), 
uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), //-- optional consumer name uploadProperties.getProperty(LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()), @@ -398,7 +402,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { @Override public UploadStatistics loadDeltaAdd(String theSystem, List theFiles, RequestDetails theRequestDetails) { - ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList())); + ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, false); return myCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(theSystem, terminologySet); @@ -407,7 +411,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { @Override public UploadStatistics loadDeltaRemove(String theSystem, List theFiles, RequestDetails theRequestDetails) { - ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList())); + ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, 
true); return myCodeSystemStorageSvc.applyDeltaCodeSystemsRemove(theSystem, terminologySet); @@ -574,8 +578,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { int valueSetCount = valueSets.size(); int rootConceptCount = codeSystemVersion.getConcepts().size(); - int conceptCount = rootConceptCount; - ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", conceptCount, rootConceptCount, valueSetCount); + ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", rootConceptCount, rootConceptCount, valueSetCount); // remove this when fully implemented ... throw new InternalErrorException(Msg.code(874) + "HLA nomenclature terminology upload not yet fully implemented."); @@ -716,13 +719,19 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { handler = new LoincLinguisticVariantsHandler(linguisticVariants); iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - String langFileName = null; + String langFileName; for (LoincLinguisticVariantsHandler.LinguisticVariant linguisticVariant : linguisticVariants) { handler = new LoincLinguisticVariantHandler(code2concept, linguisticVariant.getLanguageCode()); langFileName = linguisticVariant.getLinguisticVariantFileName(); iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_PATH.getCode() + langFileName, LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + langFileName), handler, ',', QuoteMode.NON_NUMERIC, false); } + if (theDescriptors.isOptionalFilesExist(List.of(theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode())))) { + // LOINC MapTo codes (last to make sure that all concepts were added to code2concept map) + handler = new LoincMapToHandler(code2concept); + iterateOverZipFileCsv(theDescriptors, 
theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + } + if (theCloseFiles) { IOUtils.closeQuietly(theDescriptors); } @@ -801,7 +810,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { ourLog.info("Looking for root codes"); rootConcepts .entrySet() - .removeIf(theStringTermConceptEntry -> theStringTermConceptEntry.getValue().getParents().isEmpty() == false); + .removeIf(theStringTermConceptEntry -> !theStringTermConceptEntry.getValue().getParents().isEmpty()); ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size()); @@ -857,7 +866,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { int nextLoggedCount = 0; while (iter.hasNext()) { CSVRecord nextRecord = iter.next(); - if (nextRecord.isConsistent() == false) { + if (!nextRecord.isConsistent()) { continue; } theHandler.accept(nextRecord); @@ -945,6 +954,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { if (termConceptProperties == null) return new TermConceptProperty(); Optional termConceptProperty = termConceptProperties.stream().filter(property -> key.equals(property.getKey())).findFirst(); - return termConceptProperty.isPresent() ? 
termConceptProperty.get() : new TermConceptProperty(); + return termConceptProperty.orElseGet(TermConceptProperty::new); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java index 3685b90cdc1..bcad6a7c220 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java @@ -65,4 +65,9 @@ public interface ITermDeferredStorageSvc { void saveAllDeferred(); void logQueueForUnitTest(); + + /** + * Only to be used from tests - Disallow test timeouts on deferred tasks + */ + void disallowDeferredTaskTimeout(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java new file mode 100644 index 00000000000..e633934eaa4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java @@ -0,0 +1,79 @@ +package ca.uhn.fhir.jpa.term.loinc; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2022 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; +import org.apache.commons.csv.CSVRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.trim; + +/** + * Handles addition of MAP_TO properties to TermConcepts + */ +public class LoincMapToHandler implements IZipContentsHandlerCsv { + private static final Logger ourLog = LoggerFactory.getLogger(LoincMapToHandler.class); + + public static final String CONCEPT_CODE_PROP_NAME = "LOINC"; + public static final String MAP_TO_PROP_NAME = "MAP_TO"; + public static final String DISPLAY_PROP_NAME = "COMMENT"; + + private final Map myCode2Concept; + + public LoincMapToHandler(Map theCode2concept) { + myCode2Concept = theCode2concept; + } + + @Override + public void accept(CSVRecord theRecord) { + String code = trim(theRecord.get(CONCEPT_CODE_PROP_NAME)); + String mapTo = trim(theRecord.get(MAP_TO_PROP_NAME)); + String display = trim(theRecord.get(DISPLAY_PROP_NAME)); + + if (isBlank(code)) { + ourLog.warn("MapTo record was found with a blank '" + CONCEPT_CODE_PROP_NAME + "' property"); + return; + } + + if (isBlank(mapTo)) { + ourLog.warn("MapTo record was found with a blank '" + MAP_TO_PROP_NAME + "' property"); + return; + } + + TermConcept concept = myCode2Concept.get(code); + if (concept == null) { + ourLog.warn("A TermConcept was not found for MapTo '" + CONCEPT_CODE_PROP_NAME + + "' property: '" + code + "' MapTo record ignored."); + return; + } + + concept.addPropertyCoding(MAP_TO_PROP_NAME, ITermLoaderSvc.LOINC_URI, mapTo, display); + ourLog.trace("Adding " + MAP_TO_PROP_NAME + " coding property: {} to concept.code {}", mapTo, concept.getCode()); + } + + +} diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java index 51429778d15..9a18d973ba4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java @@ -35,6 +35,9 @@ public enum LoincUploadPropertiesEnum { LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"), LOINC_XML_FILE("loinc.xml"), + LOINC_MAPTO_FILE("loinc.mapto.file"), + LOINC_MAPTO_FILE_DEFAULT("LoincTable/MapTo.csv"), + /* * MANDATORY */ @@ -153,7 +156,7 @@ public enum LoincUploadPropertiesEnum { public static LoincUploadPropertiesEnum fromCode(String theCode) { if (ourValues == null) { - HashMap values = new HashMap(); + HashMap values = new HashMap<>(); for (LoincUploadPropertiesEnum next : values()) { values.put(next.getCode(), next); } diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4ElasticIT.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4ElasticIT.java index 64d94219b3b..75e4d3ad094 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4ElasticIT.java +++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4ElasticIT.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch; +import ca.uhn.fhir.jpa.test.BaseValueSetHSearchExpansionR4Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; @@ -10,6 +11,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension; */ 
@ExtendWith(SpringExtension.class) @ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class) -public class ValueSetHSearchExpansionR4ElasticIT extends AbstractValueSetHSearchExpansionR4Test { +public class ValueSetHSearchExpansionR4ElasticIT extends BaseValueSetHSearchExpansionR4Test { } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4LuceneIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4LuceneIT.java index a6a49d7827d..17b58d97972 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4LuceneIT.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetHSearchExpansionR4LuceneIT.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.term; +import ca.uhn.fhir.jpa.test.BaseValueSetHSearchExpansionR4Test; import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig; import ca.uhn.fhir.jpa.test.config.TestR4Config; import org.junit.jupiter.api.extension.ExtendWith; @@ -12,6 +13,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension; */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.DefaultLuceneHeap.class}) -public class ValueSetHSearchExpansionR4LuceneIT extends AbstractValueSetHSearchExpansionR4Test { +public class ValueSetHSearchExpansionR4LuceneIT extends BaseValueSetHSearchExpansionR4Test { } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/AbstractValueSetHSearchExpansionR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java similarity index 99% rename from hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/AbstractValueSetHSearchExpansionR4Test.java rename to hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java index 
17d7d0bf9a5..326fe8b667a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/AbstractValueSetHSearchExpansionR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.term; +package ca.uhn.fhir.jpa.test; /*- * #%L @@ -36,11 +36,15 @@ import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; +import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator; +import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; +import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; +import ca.uhn.fhir.jpa.term.TermReadSvcImpl; +import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; -import ca.uhn.fhir.jpa.test.BaseJpaTest; import ca.uhn.fhir.parser.StrictErrorHandler; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -109,8 +113,8 @@ import static org.mockito.Mockito.when; //@ExtendWith(SpringExtension.class) //@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.DefaultLuceneHeap.class}) //@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class}) -public abstract class AbstractValueSetHSearchExpansionR4Test extends BaseJpaTest { - private static final Logger ourLog = LoggerFactory.getLogger(AbstractValueSetHSearchExpansionR4Test.class); +public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest { + private static final Logger ourLog = LoggerFactory.getLogger(BaseValueSetHSearchExpansionR4Test.class); 
private static final String CS_URL = "http://example.com/my_code_system"; private static final String CS_URL_2 = "http://example.com/my_code_system2"; diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java index 0be1f91fa1a..84c1b1b2e70 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java @@ -15,7 +15,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; -import ca.uhn.fhir.jpa.term.api.ITermReadSvc; +import ca.uhn.fhir.jpa.term.loinc.LoincMapToHandler; import ca.uhn.fhir.jpa.test.BaseJpaTest; import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig; import ca.uhn.fhir.jpa.test.config.TestR4Config; @@ -35,6 +35,7 @@ import org.hibernate.dialect.PostgreSQL10Dialect; import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; import org.hl7.fhir.r4.model.ValueSet; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -64,7 +65,6 @@ import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -89,38 +89,46 @@ import static org.junit.jupiter.api.Assertions.fail; /** * Sandbox test (not intended to run on CI build) so must be kept disabled + * It has two running modes (load a DB and test loaded data) defined by the property LOAD_DB * * Requires the loinc-full resource directory to 
contain the following files: * _ Loinc_1.11.zip * _ v1.11_loincupload.properties * - * but last one is too large for the repo, so before running this test, copy it from: - * https://drive.google.com/drive/folders/18be2R5IurlWnugkl18nDG7wrwPsOtfR-?usp=sharing + * but first one is too large for the repo, so before running this test, copy it from: + * here * (SmileCDR has access) * - * Can be executed with Lucene or Elastic configuration + * Can be executed with Lucene, Elastic or no FT configuration * */ @Disabled("Sandbox test") @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { LoincFullLoadR4SandboxIT.NoopMandatoryTransactionListener.class + + // one of the following needs to be present + // TestR4Config.class // uses in-memory DB ,LoincFullLoadR4SandboxIT.OverriddenR4Config.class // your configured persistent DB - // pick up elastic, lucene or no-full-text engine: + + // pick up elastic or lucene engine: ,TestHSearchAddInConfig.NoFT.class }) public class LoincFullLoadR4SandboxIT extends BaseJpaTest { private static final Logger ourLog = LoggerFactory.getLogger(LoincFullLoadR4SandboxIT.class); + private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###"); + + public static final boolean USE_REAL_DB = true; public static final boolean LOAD_DB = false; - public static final String DB_NAME = "cdr_loinc_display"; + public static final String DB_NAME = "testDB_mapto"; + - private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###"); public static final String LOINC_URL = "http://loinc.org"; public static final String TEST_FILES_CLASSPATH = "loinc-full/"; @@ -133,11 +141,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { // ----------------------------------------------------------------------------------------- -// full LOINC file 1.11 Initially cloned from 2.73 for tests, with custom lonc.xml file with added 24 new properties -// Note that internal defined version is 2.78 +// full LOINC file 1.11 
(initially cloned from 2.73 for tests, with custom lonc.xml file with added 24 new properties) -// public static final String CS_VERSION = "1.11"; - public static final String CS_VERSION = "2.78"; + public static final String CS_VERSION = "1.11"; public static final int CS_CONCEPTS_COUNT = 234_390; public static final int ASSOCIATED_OBSERVATIONS_COUNT = 8_058; public static final int ASK_AT_ORDER_ENTRY_COUNT = 65; @@ -158,7 +164,6 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { @Autowired private EntityManager myEntityManager; @Autowired private TermLoaderSvcImpl myTermLoaderSvc; @Autowired private ITermConceptDao myTermConceptDao; - @Autowired private ITermReadSvc myTermReadSvc; @Autowired private ITermDeferredStorageSvc myTerminologyDeferredStorageSvc; @Autowired private ITermCodeSystemDao myTermCodeSystemDao; @Autowired private ITermCodeSystemVersionDao myTermCodeSystemVersionDao; @@ -171,10 +176,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { private int associatedObservationsCount = 0; private int askAtOrderEntryCount = 0; - private int processedPropertiesCounter = 0; - - private static List recordPropertyNames; - private static List newRecordPropertyNames = List.of( + private int validatedPropertiesCounter = 0; + private int validatedMapToEntriesCounter = 0; + private final static List newRecordPropertyNames = List.of( "CHNG_TYPE", "DefinitionDescription", "CONSUMER_NAME", @@ -201,6 +205,16 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { "ValidHL7AttachmentRequest" ); + + @BeforeEach + void setUp() { + if (LOAD_DB) { + // real load requires longer time than allowed for unit tests + myTerminologyDeferredStorageSvc.disallowDeferredTaskTimeout(); + } + } + + @Test() public void uploadLoincCodeSystem() throws Exception { @@ -214,7 +228,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { // save all deferred concepts, properties, links, etc sw.restart(); - saveAllDeferredNoTimeout(); + 
myTerminologyDeferredStorageSvc.saveAllDeferred(); ourLog.info("=================> Saving all terminology deferred entities took {}", sw); validateSavedConceptsCount(); @@ -234,13 +248,14 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { // query each code and validate that all properties in both maps are set List> conceptPropertyCvsMap = readLoincCsvRecordsAsMap(); - Multimap> conceptMapToCvsMap = ArrayListMultimap.create(); + Multimap> conceptMapToCvsMap = readMapToCsvRecordsAsMap(); validateCreatedConceptsHaveAllProperties( conceptPropertyCvsMap, conceptMapToCvsMap ); - ourLog.info("Processed properties : {}", processedPropertiesCounter); - ourLog.info("associatedObservationsCount : {}", associatedObservationsCount); - ourLog.info("askAtOrderEntryCount : {}", askAtOrderEntryCount); + ourLog.info("Validated properties :{}", String.format("%,6d", validatedPropertiesCounter)); + ourLog.info("Validated MapTo entries :{}", String.format("%,6d", validatedMapToEntriesCounter)); + ourLog.info("associatedObservationsCount :{}", String.format("%,6d", associatedObservationsCount)); + ourLog.info("askAtOrderEntryCount :{}", String.format("%,6d", askAtOrderEntryCount)); ourLog.info(""); assertEquals(ASK_AT_ORDER_ENTRY_COUNT, askAtOrderEntryCount); @@ -252,50 +267,22 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { } - private void saveAllDeferredNoTimeout() { - while( ! myTerminologyDeferredStorageSvc.isStorageQueueEmpty() ) { - myTerminologyDeferredStorageSvc.saveDeferred(); - } - } /** - * Used occasionally for some manual validation - don't delete + * Calls validators for each TC for the code in each record in theConceptPropertyInputMap. 
+ * @param theConceptPropertyInputMap records in loinc.csv input file mapped by propertyName -> propertyValue + * @param theConceptMapToCvsMap records in MapTo.csv input file mapped by TC-code -> List of Pair (value, display) */ - private void queryForSpecificValueSet() { - runInTransaction(() -> { - Query q = myEntityManager.createQuery("from ForcedId where myForcedId like 'LG8749-6%'"); - @SuppressWarnings("unchecked") - List fIds = (List) q.getResultList(); - long res_id = fIds.stream().map(ForcedId::getId).sorted().findFirst().orElse(fail("ForcedId not found")); - - Query q1 = myEntityManager.createQuery("from ResourceTable where id = " + res_id); - @SuppressWarnings("unchecked") - List vsList = (List) q1.getResultList(); - assertEquals(1, vsList.size()); - long vsLongId = vsList.get(0).getId(); - ValueSet vs = (ValueSet) myValueSetDao.toResource( vsList.get(0), false ); - assertNotNull(vs); - - Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId); - @SuppressWarnings("unchecked") - List tvsList = (List) q2.getResultList(); - assertEquals(1, tvsList.size()); - - TermValueSet termValueSet = tvsList.get(0); - }); - } - - private void validateCreatedConceptsHaveAllProperties(List> theConceptPropertyInputMap, - Multimap> theConceptMapToCvsMap) { + Multimap> theConceptMapToCvsMap) { TermCodeSystemVersion tcsVersion = getTermCodeSystemVersion(); - ourLog.info("Properties to process: {}", ourDecimalFormat.format(theConceptPropertyInputMap.size())); + ourLog.info("Properties to validate: {}", ourDecimalFormat.format(theConceptPropertyInputMap.size())); for (Map tcRecordMap : theConceptPropertyInputMap) { String recordCode = getRecordCode(tcRecordMap); - processedPropertiesCounter++; + validatedPropertiesCounter++; runInTransaction(() -> { Optional tcFomDbOpt = myTermConceptDao.findByCodeSystemAndCode(tcsVersion, recordCode); @@ -304,8 +291,8 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { () -> ourLog.error("Couldn't 
find TermConcept with code: {} in DB", recordCode)); }); - if (processedPropertiesCounter % 10_000 == 0) { - ourLog.info("Processed properties: {}", ourDecimalFormat.format(processedPropertiesCounter)); + if (validatedPropertiesCounter % 10_000 == 0) { + ourLog.info("Validated properties: {}", ourDecimalFormat.format(validatedPropertiesCounter)); } } ourLog.info(""); @@ -320,8 +307,17 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { } + /** + * For received TC: + * _ validates that code is same as record code + * _ calls new properties validator + * _ calls MapTo properties validator + * @param theTermConcept the TermConcept to validate + * @param theRecordMap the map of propName -> propValue of all defined input properties for TC + * @param theConceptMapToCvsMap the map of TC-code -> List of pair (value, display) for each property defined in input MapTo.csv file + */ private void validateTermConceptEntry(TermConcept theTermConcept, - Map theRecordMap, Multimap> theConceptMapToCvsMap) { + Map theRecordMap, Multimap> theConceptMapToCvsMap) { String recordCode = getRecordCode(theRecordMap); if ( ! 
theTermConcept.getCode().equals(recordCode) ) { @@ -329,21 +325,53 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { } ourLog.trace("Validating new properties for TC with code: {}", theTermConcept.getCode()); - // map of TC property name | set of property values - HashMap> tcConceptPropertyMap = theTermConcept.getProperties().stream() + // map of TC property name -> pair(value, display) + HashMap>> tcCodeValueDisplayMap = theTermConcept.getProperties().stream() .collect(Collectors.groupingBy(TermConceptProperty::getKey, HashMap::new, - mapping(TermConceptProperty::getValue, toSet()))); + mapping(tcp -> Pair.of(tcp.getValue(), tcp.getDisplay()), toSet()))); - validateNewProperties(theTermConcept, theRecordMap, tcConceptPropertyMap); + validateNewProperties(theTermConcept, theRecordMap, tcCodeValueDisplayMap); Collection> toMapRecordForTermConcept = theConceptMapToCvsMap.get(recordCode); -// validateMapToProperties(recordCode, tcConceptPropertyMap, toMapRecordForTermConcept); + validateMapToProperties(recordCode, tcCodeValueDisplayMap, toMapRecordForTermConcept); } + /** + * For each received TC which has a MapTo record, validates that: + * _ each record property generated a MAP_TO value property in TC + * _ each not-null display property was set as the MAP_TO display property in TC + * @param theRecordCode key code of the record and TC + * @param theTcConceptPropertyMap map of propName -> pair(value, display) from input MapTo.csv (display can be null) + * @param theToMapRecordForTC the collection of MAP_TO property value-display pairs (display can be null) + */ + private void validateMapToProperties(String theRecordCode, + HashMap>> theTcConceptPropertyMap, + Collection> theToMapRecordForTC) { + + if (CollectionUtils.isEmpty(theToMapRecordForTC)) { return; } // no MapTo record for this TermConcept + + ourLog.trace("Validating MapTo properties for TC with code: {}", theRecordCode); + + Set> tcConceptProps = theTcConceptPropertyMap.get("MAP_TO"); + 
HashSet> theToMapRecordForTCAsSet = new HashSet<>(theToMapRecordForTC); + + mapToAsserts.add( () -> assertEquals(tcConceptProps, theToMapRecordForTCAsSet, "TermConcept for code: '" + + theRecordCode + "' 'MAP_TO' properties don't match MapTo.csv file properties") ); + + validatedMapToEntriesCounter++; + } + + + /** + * + * @param theTermConcept the TermConcept to validate + * @param theRecordPropsMap map of propName -> pair(value, display) from input MapTo.csv (display is nullable) + * @param theTcConceptPropertyMap the map propName -> Pair(value, display) of TC (display is nullable) + */ private void validateNewProperties(TermConcept theTermConcept, Map theRecordPropsMap, - HashMap> theTcConceptPropertyMap) { + HashMap>> theTcConceptPropertyMap) { // make sure we are good so far and both entries to compare are for same TermConcept code assertEquals(theTermConcept.getCode(), theRecordPropsMap.get("LOINC_NUM"), "theTcCode and record key (LOINC_NUM) must match"); @@ -356,60 +384,72 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { // bypass old properties if ( ! 
newRecordPropertyNames.contains(recordEntry.getKey()) ) { continue; } - Set tcConceptValues = theTcConceptPropertyMap.get(recordEntry.getKey()); + Set> tcPropsValueDisplay = theTcConceptPropertyMap.get(recordEntry.getKey()); if ( ASSOCIATED_OBSERVATIONS_PROP_NAME.equals(recordEntry.getKey()) ) { associatedObservationsCount++; - validateCodingProperties(theTermConcept, ASSOCIATED_OBSERVATIONS_PROP_NAME, recordEntry, tcConceptValues); + validateCodingProperties(theTermConcept, recordEntry.getKey(), recordEntry, tcPropsValueDisplay); continue; } if ( ASK_AT_ORDER_ENTRY_PROP_NAME.equals(recordEntry.getKey()) ) { askAtOrderEntryCount++; - validateCodingProperties(theTermConcept, ASK_AT_ORDER_ENTRY_PROP_NAME, recordEntry, tcConceptValues); + validateCodingProperties(theTermConcept, recordEntry.getKey(), recordEntry, tcPropsValueDisplay); continue; } - if (CollectionUtils.isEmpty(tcConceptValues)) { - ourLog.error("TermConcept with code: {} does not have property: {} which in csv file has value: {}", - theTermConcept.getCode(), recordEntry.getKey(), recordEntry.getValue()); + assertEquals(1, tcPropsValueDisplay.size(), "TermConcept with code: {} was expected to have 1 property " + + "with key: " + recordEntry.getKey() + " and value: " + recordEntry.getValue() + " but has: " + tcPropsValueDisplay.size() + " instead." ); + + String tcPropValue = tcPropsValueDisplay.iterator().next().getLeft(); + if ( ! 
recordEntry.getValue().equals(tcPropValue) ) { + ourLog.error("TermConcept with code: {} property: {} expected value: {}, found value: {}", + theTermConcept.getCode(), recordEntry.getKey(), recordEntry.getValue(), tcPropValue); } } } /** - * Validate that all file property codes become a "Coding" property on the TermConcept + * Validate that all file CODING properties become a "Coding" property on the TermConcept * and display properties are the display of the target TermConcept */ - private void validateCodingProperties(TermConcept theSourceTermConcept, String thePropName, - Map.Entry recordEntry, Set theTCPropValues) { + private void validateCodingProperties(TermConcept theTermConcept, String thePropName, + Map.Entry recordEntry, Set> theTCPropValueDisplaySet) { List recordPropertyCodes = parsePropertyCodeValues(recordEntry.getValue()); - // validate each property in the records was uploaded to the corresponding TermConcept - for (String recordPropertyCode : recordPropertyCodes) { - if ( ! 
theTCPropValues.contains(recordPropertyCode) ) { - ourLog.error("For TC code: {}, prop: {}, record code: {} not found among uploaded TC properties: {}", - theSourceTermConcept.getCode(), recordEntry.getKey(), recordPropertyCode, String.join(" - ", theTCPropValues)); - } + // validate that each property value in the records was uploaded to the corresponding TermConcept + List tcPropValues = theTCPropValueDisplaySet.stream().map(Pair::getLeft).collect(Collectors.toList()); + checkCodeSetsEqual(recordPropertyCodes, tcPropValues, theTermConcept.getCode(), thePropName); - // validate that the display value for each uploaded TC property of name thePropertyName is the display of the TC pointed by the TC code - validatePropertiesDisplay(theSourceTermConcept, thePropName, recordPropertyCode); + // validate that the display value for each uploaded TC property of name thePropName is the display of the TC pointed by the property code + validatePropertyDisplays(theTermConcept, thePropName); + } + + private void checkCodeSetsEqual(List theExpectedCodes, List theCreatedCodes, String theTcCode, String thePropName) { + if (theExpectedCodes.equals(theCreatedCodes)) return; + + // inform each expected code not present in TC + for (String recordPropertyCode : theExpectedCodes) { + if ( ! theCreatedCodes.contains(recordPropertyCode) ) { + ourLog.error("For TC code: {}, prop: {}, record code: {} not found among uploaded TC properties: {}", + theTcCode, thePropName, recordPropertyCode, theCreatedCodes); + } } - // also check that uploaded TC only has properties is has to have - for (String tcPropValue : theTCPropValues) { - if ( ! recordEntry.getValue().contains(tcPropValue)) { - ourLog.error("TC with code: {}, has a property with code: {}, which is not in defined property list: {}", - theSourceTermConcept.getCode(), tcPropValue, recordEntry.getValue()); + // inform each TC code not present in expected + for (String tcPropertyCode : theCreatedCodes) { + if ( ! 
theExpectedCodes.contains(tcPropertyCode) ) { + ourLog.error("TC with code: {}, prop: {}, TC code: {} not found among record properties: {}", + theTcCode, thePropName, theCreatedCodes, tcPropertyCode); } } } - private void validatePropertiesDisplay(TermConcept theSourceTermConcept, String thePropName, String recordPropertyCode) { - // from source TermConcept obtain the map of thePropName properties: property code - display + private void validatePropertyDisplays(TermConcept theSourceTermConcept, String thePropName) { + // from TermConcept obtain the map of thePropName properties: property code - display Map srcTcCodeDisplayMap = theSourceTermConcept.getProperties().stream() .filter(p -> p.getKey().equals(thePropName)) .collect(Collectors.toMap(TermConceptProperty::getValue, TermConceptProperty::getDisplay)); @@ -418,13 +458,14 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { Optional targetTermConceptOpt = myTermConceptDao.findByCodeSystemAndCode(termCodeSystemVersion, tcCodeDisplayEntry.getKey()); if (targetTermConceptOpt.isEmpty()) { - ourLog.error("For TC code: {}, target TC with code: {} is not present in DB", theSourceTermConcept.getCode(), recordPropertyCode); - } - - TermConcept targetTermConcept = targetTermConceptOpt.get(); - if ( ! tcCodeDisplayEntry.getValue().equals(targetTermConcept.getDisplay()) ) { - ourLog.error("For TC with code: {}, display is: {}, while target TC display is: {}", - theSourceTermConcept.getCode(), tcCodeDisplayEntry.getValue(), targetTermConcept.getDisplay()); + ourLog.error("For TC code: {}, target TC with code: {} is not present in DB", + theSourceTermConcept.getCode(), tcCodeDisplayEntry.getKey()); + } else { + TermConcept targetTermConcept = targetTermConceptOpt.get(); + if ( ! 
tcCodeDisplayEntry.getValue().equals(targetTermConcept.getDisplay()) ) { + ourLog.error("For TC with code: {}, display is: {}, while target TC display is: {}", + theSourceTermConcept.getCode(), tcCodeDisplayEntry.getValue(), targetTermConcept.getDisplay()); + } } } } @@ -438,14 +479,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { private List> readLoincCsvRecordsAsMap() throws Exception { - CSVParser parser = getParserForZipFile(LOINC_ZIP_CLASSPATH, LOINC_CSV_ZIP_ENTRY_PATH); + CSVParser parser = getParserForZipFile(LOINC_CSV_ZIP_ENTRY_PATH); Iterator iter = parser.iterator(); - Map headerMap = parser.getHeaderMap(); - recordPropertyNames = headerMap.entrySet().stream() - .sorted(Comparator.comparingInt(Map.Entry::getValue)) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); ourLog.debug("Header map: {}", parser.getHeaderMap()); int count = 0; @@ -461,14 +497,43 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { records.add( nextRecord.toMap() ); count++; } - ourLog.info("Read and mapped {} csv file lines", count); + ourLog.info("Read and mapped {} {} file lines", ourDecimalFormat.format(count), LOINC_CSV_ZIP_ENTRY_PATH); return records; } + private Multimap> readMapToCsvRecordsAsMap() throws Exception { + CSVParser parser = getParserForZipFile(LOINC_MAP_TO_ZIP_ENTRY_PATH); + + ourLog.debug("Header map: {}", parser.getHeaderMap()); + + Multimap> records = ArrayListMultimap.create(); + int count = 0; + + for (CSVRecord nextRecord : parser) { + if (!nextRecord.isConsistent()) { + ourLog.error("Inconsistent record"); + continue; + } + String code = nextRecord.get(LoincMapToHandler.CONCEPT_CODE_PROP_NAME); + assertNotNull(code, "MapTo record with blank '" + LoincMapToHandler.CONCEPT_CODE_PROP_NAME + "' field: " + nextRecord); + String toValue = nextRecord.get(LoincMapToHandler.MAP_TO_PROP_NAME); + assertNotNull(code, "MapTo record with blank '" + LoincMapToHandler.MAP_TO_PROP_NAME + "' field: " + nextRecord); + + records.put(code, 
Pair.of(toValue, nextRecord.get(LoincMapToHandler.DISPLAY_PROP_NAME))); + count++; + } + ourLog.info("Read and mapped {} {} file lines into {} map entries", ourDecimalFormat.format(count), + LOINC_MAP_TO_ZIP_ENTRY_PATH, ourDecimalFormat.format(records.asMap().size())); + return records; + } + + + + @Nonnull - private CSVParser getParserForZipFile(String theZipFileClassPath, String theFileEntryPath) throws Exception { - Reader reader = new StringReader(getCvsStringFromZip(theZipFileClassPath, theFileEntryPath)); + private CSVParser getParserForZipFile(String theFileEntryPath) throws Exception { + Reader reader = new StringReader(getCvsStringFromZip(LOINC_ZIP_CLASSPATH, theFileEntryPath)); CSVFormat format = CSVFormat .newFormat(',') @@ -501,7 +566,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { int dbVersionedTermConceptCount = runInTransaction(() -> myTermConceptDao.countByCodeSystemVersion(tcsvId) ); ourLog.info("=================> Number of stored concepts for version {}: {}", - CS_VERSION, ourDecimalFormat.format(dbVersionedTermConceptCount)); + CS_VERSION, ourDecimalFormat.format(dbVersionedTermConceptCount) ); assertEquals(CS_CONCEPTS_COUNT, dbVersionedTermConceptCount); } @@ -534,6 +599,34 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest { } + /** + * Used occasionally for some manual validation - don't delete + */ + private void queryForSpecificValueSet() { + runInTransaction(() -> { + Query q = myEntityManager.createQuery("from ForcedId where myForcedId like 'LG8749-6%'"); + @SuppressWarnings("unchecked") + List fIds = (List) q.getResultList(); + long res_id = fIds.stream().map(ForcedId::getId).sorted().findFirst().orElse(fail("ForcedId not found")); + + Query q1 = myEntityManager.createQuery("from ResourceTable where id = " + res_id); + @SuppressWarnings("unchecked") + List vsList = (List) q1.getResultList(); + assertEquals(1, vsList.size()); + long vsLongId = vsList.get(0).getId(); + ValueSet vs = (ValueSet) 
myValueSetDao.toResource( vsList.get(0), false ); + assertNotNull(vs); + + Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId); + @SuppressWarnings("unchecked") + List tvsList = (List) q2.getResultList(); + assertEquals(1, tvsList.size()); + + TermValueSet termValueSet = tvsList.get(0); + }); + } + + // List of all columns in Loinc.csv input file // private static final String[] recordFieldNames = { // "LOINC_NUM" diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/resources/loinc-full/readme.txt b/hapi-fhir-jpaserver-test-utilities/src/test/resources/loinc-full/readme.txt index 3c8904c22ac..7e469bfa35e 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/resources/loinc-full/readme.txt +++ b/hapi-fhir-jpaserver-test-utilities/src/test/resources/loinc-full/readme.txt @@ -1,10 +1,8 @@ -LoincFullLoadR4SandboxIT requires this directory must contain the following -three files: - _ Loinc.csv.gz - _ Loinc_1.11.zip and +LoincFullLoadR4SandboxIT requires this directory must contain the following files: + _ Loinc_1.11.zip _ v1.11_loincupload.properties -but those files are too large for the repo, so before running this test you need to copy them from: +but zip file is too large for the repo, so before running this test you need to copy it from: https://drive.google.com/drive/folders/18be2R5IurlWnugkl18nDG7wrwPsOtfR-?usp=sharing (SmileCDR has access)