Issue 4106: process MAP_TO properties when uploading LOINC terminology (#4231)
* Add LOINC upload handler to process MAP_TO properties.
* Move test to test folder.
* Fix MapTo file location property name.
* Allow override, as full LOINC upload tests take (much) longer than the defined timeout.
* Move handler later to make sure all concepts were added to the map.
* Update test to check all project enhancements.
* Restore project-unrelated changes.
* Rename base test class properly and move it to the right package.
* Make MapTo.csv processing conditional on the file being present, for backwards compatibility (the file is optional).
* Document and annotate test-only usage for disabling the deferred-tasks timeout.

Co-authored-by: juan.marchionatto <juan.marchionatto@smilecdr.com>
Parent: 3053a1dcf9
Commit: 6cf5989d6c
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 4106
+title: "LOINC terminology upload process was enhanced by loading `MAP_TO` properties defined in MapTo.csv input file to TermConcept(s)."
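To illustrate what the new MapTo processing does (a sketch, not part of the commit): each MapTo.csv row carries a LOINC source code, a MAP_TO target code and an optional COMMENT used as display; LoincMapToHandler looks the source code up in the upload's code2concept map and attaches a MAP_TO coding property to that TermConcept. The codes below are invented, and the snippet assumes hapi-fhir-jpaserver-base and commons-csv are on the classpath.

import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.loinc.LoincMapToHandler;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

public class LoincMapToSketch {
    public static void main(String[] args) throws Exception {
        // code2concept is normally built while the LOINC zip is processed; this code value is invented
        Map<String, TermConcept> code2concept = new HashMap<>();
        code2concept.put("1234-5", new TermConcept().setCode("1234-5"));

        // one hypothetical MapTo.csv row: source code, target code, display comment
        String mapToCsv = "\"LOINC\",\"MAP_TO\",\"COMMENT\"\n"
            + "\"1234-5\",\"5678-9\",\"preferred replacement\"\n";

        LoincMapToHandler handler = new LoincMapToHandler(code2concept);
        for (CSVRecord record : CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(new StringReader(mapToCsv))) {
            handler.accept(record); // adds a MAP_TO coding property (value 5678-9, display "preferred replacement")
        }

        // the MAP_TO property now sits on the concept; the deferred storage service persists it later
        System.out.println(code2concept.get("1234-5").getProperties().size());
    }
}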
@@ -143,6 +143,10 @@ public class LoadedFileDescriptors implements Closeable {
         }
     }

+    boolean isOptionalFilesExist(List<String> theFileList) {
+        return notFound(theFileList).isEmpty();
+    }
+
     void verifyPartLinkFilesExist(List<String> theMultiPartLinkFiles, String theSinglePartLinkFile) {
         List<String> notFoundMulti = notFound(theMultiPartLinkFiles);
         List<String> notFoundSingle = notFound(Arrays.asList(theSinglePartLinkFile));
@@ -78,6 +78,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
     private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class);
     private static final long SAVE_ALL_DEFERRED_WARN_MINUTES = 1;
     private static final long SAVE_ALL_DEFERRED_ERROR_MINUTES = 5;
+    private boolean myAllowDeferredTasksTimeout = true;
     private final List<TermCodeSystem> myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>());
     private final Queue<TermCodeSystemVersion> myDeferredCodeSystemVersionsDeletions = new ConcurrentLinkedQueue<>();
     private final List<TermConcept> myDeferredConcepts = Collections.synchronizedList(new ArrayList<>());
@@ -274,13 +275,18 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {

     @Override
     public void saveAllDeferred() {
-        TimeoutManager timeoutManager = new TimeoutManager(TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()",
-            Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES),
-            Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES));
+        TimeoutManager timeoutManager = null;
+        if (myAllowDeferredTasksTimeout) {
+            timeoutManager = new TimeoutManager(TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()",
+                Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES),
+                Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES));
+        }

         while (!isStorageQueueEmpty()) {
-            if (timeoutManager.checkTimeout()) {
-                ourLog.info(toString());
-            }
+            if (myAllowDeferredTasksTimeout) {
+                if (timeoutManager.checkTimeout()) {
+                    ourLog.info(toString());
+                }
+            }
             saveDeferred();
         }
@@ -486,6 +492,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
     void setCodeSystemVersionDaoForUnitTest(ITermCodeSystemVersionDao theCodeSystemVersionDao) {
         myCodeSystemVersionDao = theCodeSystemVersionDao;
     }
+    @Override
+    public void disallowDeferredTaskTimeout() { myAllowDeferredTasksTimeout = false; }

     @Override
     @VisibleForTesting
@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.term.loinc.LoincIeeeMedicalDeviceCodeHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincImagingDocumentCodeHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincLinguisticVariantHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincLinguisticVariantsHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincMapToHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincParentGroupFileHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincPartHandler;
 import ca.uhn.fhir.jpa.term.loinc.LoincPartLinkHandler;
@@ -112,6 +113,8 @@ import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUIS
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT;
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_PATH;
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_MAPTO_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_MAPTO_FILE_DEFAULT;
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE;
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT;
 import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE;
@@ -260,6 +263,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
             uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()),
             uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()),
             uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()),
+            uploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()),

             //-- optional consumer name
             uploadProperties.getProperty(LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()),
@@ -398,7 +402,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {

     @Override
     public UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
-        ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList()));
+        ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList()));
         try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) {
             CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, false);
             return myCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(theSystem, terminologySet);
@@ -407,7 +411,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {

     @Override
     public UploadStatistics loadDeltaRemove(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
-        ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList()));
+        ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList()));
         try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) {
             CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, true);
             return myCodeSystemStorageSvc.applyDeltaCodeSystemsRemove(theSystem, terminologySet);
@@ -574,8 +578,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {

         int valueSetCount = valueSets.size();
         int rootConceptCount = codeSystemVersion.getConcepts().size();
-        int conceptCount = rootConceptCount;
-        ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", conceptCount, rootConceptCount, valueSetCount);
+        ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", rootConceptCount, rootConceptCount, valueSetCount);

         // remove this when fully implemented ...
         throw new InternalErrorException(Msg.code(874) + "HLA nomenclature terminology upload not yet fully implemented.");
@@ -716,13 +719,19 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
             handler = new LoincLinguisticVariantsHandler(linguisticVariants);
             iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);

-            String langFileName = null;
+            String langFileName;
             for (LoincLinguisticVariantsHandler.LinguisticVariant linguisticVariant : linguisticVariants) {
                 handler = new LoincLinguisticVariantHandler(code2concept, linguisticVariant.getLanguageCode());
                 langFileName = linguisticVariant.getLinguisticVariantFileName();
                 iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_PATH.getCode() + langFileName, LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + langFileName), handler, ',', QuoteMode.NON_NUMERIC, false);
             }

+            if (theDescriptors.isOptionalFilesExist(List.of(theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode())))) {
+                // LOINC MapTo codes (last to make sure that all concepts were added to code2concept map)
+                handler = new LoincMapToHandler(code2concept);
+                iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
+            }
+
             if (theCloseFiles) {
                 IOUtils.closeQuietly(theDescriptors);
             }
@@ -801,7 +810,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
         ourLog.info("Looking for root codes");
         rootConcepts
             .entrySet()
-            .removeIf(theStringTermConceptEntry -> theStringTermConceptEntry.getValue().getParents().isEmpty() == false);
+            .removeIf(theStringTermConceptEntry -> !theStringTermConceptEntry.getValue().getParents().isEmpty());

         ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size());

@@ -857,7 +866,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
         int nextLoggedCount = 0;
         while (iter.hasNext()) {
             CSVRecord nextRecord = iter.next();
-            if (nextRecord.isConsistent() == false) {
+            if (!nextRecord.isConsistent()) {
                 continue;
             }
             theHandler.accept(nextRecord);
@@ -945,6 +954,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
         if (termConceptProperties == null)
             return new TermConceptProperty();
         Optional<TermConceptProperty> termConceptProperty = termConceptProperties.stream().filter(property -> key.equals(property.getKey())).findFirst();
-        return termConceptProperty.isPresent() ? termConceptProperty.get() : new TermConceptProperty();
+        return termConceptProperty.orElseGet(TermConceptProperty::new);
     }
 }
@@ -65,4 +65,9 @@ public interface ITermDeferredStorageSvc {
     void saveAllDeferred();

     void logQueueForUnitTest();
+
+    /**
+     * Only to be used from tests - Disallow test timeouts on deferred tasks
+     */
+    void disallowDeferredTaskTimeout();
 }
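For context, disallowDeferredTaskTimeout() is a test-only switch: when called, saveAllDeferred() skips its TimeoutManager so a full (slow) terminology load can finish. A minimal sketch of how a test might wire it, mirroring the @BeforeEach added to LoincFullLoadR4SandboxIT further down (class and method names here are illustrative only):

import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.beans.factory.annotation.Autowired;

abstract class FullLoadTestSketch {

    @Autowired
    private ITermDeferredStorageSvc myTerminologyDeferredStorageSvc;

    @BeforeEach
    void disableDeferredTaskTimeout() {
        // test-only switch added by this commit: saveAllDeferred() will not enforce its timeout
        myTerminologyDeferredStorageSvc.disallowDeferredTaskTimeout();
    }
}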
@@ -0,0 +1,79 @@
+package ca.uhn.fhir.jpa.term.loinc;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import org.apache.commons.csv.CSVRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.trim;
+
+/**
+ * Handles addition of MAP_TO properties to TermConcepts
+ */
+public class LoincMapToHandler implements IZipContentsHandlerCsv {
+    private static final Logger ourLog = LoggerFactory.getLogger(LoincMapToHandler.class);
+
+    public static final String CONCEPT_CODE_PROP_NAME = "LOINC";
+    public static final String MAP_TO_PROP_NAME = "MAP_TO";
+    public static final String DISPLAY_PROP_NAME = "COMMENT";
+
+    private final Map<String, TermConcept> myCode2Concept;
+
+    public LoincMapToHandler(Map<String, TermConcept> theCode2concept) {
+        myCode2Concept = theCode2concept;
+    }
+
+    @Override
+    public void accept(CSVRecord theRecord) {
+        String code = trim(theRecord.get(CONCEPT_CODE_PROP_NAME));
+        String mapTo = trim(theRecord.get(MAP_TO_PROP_NAME));
+        String display = trim(theRecord.get(DISPLAY_PROP_NAME));
+
+        if (isBlank(code)) {
+            ourLog.warn("MapTo record was found with a blank '" + CONCEPT_CODE_PROP_NAME + "' property");
+            return;
+        }
+
+        if (isBlank(mapTo)) {
+            ourLog.warn("MapTo record was found with a blank '" + MAP_TO_PROP_NAME + "' property");
+            return;
+        }
+
+        TermConcept concept = myCode2Concept.get(code);
+        if (concept == null) {
+            ourLog.warn("A TermConcept was not found for MapTo '" + CONCEPT_CODE_PROP_NAME +
+                "' property: '" + code + "' MapTo record ignored.");
+            return;
+        }
+
+        concept.addPropertyCoding(MAP_TO_PROP_NAME, ITermLoaderSvc.LOINC_URI, mapTo, display);
+        ourLog.trace("Adding " + MAP_TO_PROP_NAME + " coding property: {} to concept.code {}", mapTo, concept.getCode());
+    }
+
+
+}
@@ -35,6 +35,9 @@ public enum LoincUploadPropertiesEnum {
     LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"),
     LOINC_XML_FILE("loinc.xml"),

+    LOINC_MAPTO_FILE("loinc.mapto.file"),
+    LOINC_MAPTO_FILE_DEFAULT("LoincTable/MapTo.csv"),
+
     /*
      * MANDATORY
      */
@@ -153,7 +156,7 @@ public enum LoincUploadPropertiesEnum {

     public static LoincUploadPropertiesEnum fromCode(String theCode) {
         if (ourValues == null) {
-            HashMap<String, LoincUploadPropertiesEnum> values = new HashMap<String, LoincUploadPropertiesEnum>();
+            HashMap<String, LoincUploadPropertiesEnum> values = new HashMap<>();
             for (LoincUploadPropertiesEnum next : values()) {
                 values.put(next.getCode(), next);
             }
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.term;

 import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
+import ca.uhn.fhir.jpa.test.BaseValueSetHSearchExpansionR4Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
@@ -10,6 +11,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
  */
 @ExtendWith(SpringExtension.class)
 @ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
-public class ValueSetHSearchExpansionR4ElasticIT extends AbstractValueSetHSearchExpansionR4Test {
+public class ValueSetHSearchExpansionR4ElasticIT extends BaseValueSetHSearchExpansionR4Test {

 }
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.term;

+import ca.uhn.fhir.jpa.test.BaseValueSetHSearchExpansionR4Test;
 import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
 import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -12,6 +13,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
  */
 @ExtendWith(SpringExtension.class)
 @ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.DefaultLuceneHeap.class})
-public class ValueSetHSearchExpansionR4LuceneIT extends AbstractValueSetHSearchExpansionR4Test {
+public class ValueSetHSearchExpansionR4LuceneIT extends BaseValueSetHSearchExpansionR4Test {

 }
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.term;
+package ca.uhn.fhir.jpa.test;

 /*-
  * #%L
@@ -36,11 +36,15 @@ import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
+import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator;
+import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
+import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
+import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
+import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
 import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
-import ca.uhn.fhir.jpa.test.BaseJpaTest;
 import ca.uhn.fhir.parser.StrictErrorHandler;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@@ -109,8 +113,8 @@ import static org.mockito.Mockito.when;
 //@ExtendWith(SpringExtension.class)
 //@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.DefaultLuceneHeap.class})
 //@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
-public abstract class AbstractValueSetHSearchExpansionR4Test extends BaseJpaTest {
-    private static final Logger ourLog = LoggerFactory.getLogger(AbstractValueSetHSearchExpansionR4Test.class);
+public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {
+    private static final Logger ourLog = LoggerFactory.getLogger(BaseValueSetHSearchExpansionR4Test.class);

     private static final String CS_URL = "http://example.com/my_code_system";
     private static final String CS_URL_2 = "http://example.com/my_code_system2";
@@ -15,7 +15,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
 import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
-import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
+import ca.uhn.fhir.jpa.term.loinc.LoincMapToHandler;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
 import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
 import ca.uhn.fhir.jpa.test.config.TestR4Config;
@@ -35,6 +35,7 @@ import org.hibernate.dialect.PostgreSQL10Dialect;
 import org.hl7.fhir.r4.model.CodeableConcept;
 import org.hl7.fhir.r4.model.Coding;
 import org.hl7.fhir.r4.model.ValueSet;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -64,7 +65,6 @@ import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -89,38 +89,46 @@ import static org.junit.jupiter.api.Assertions.fail;

 /**
  * Sandbox test (not intended to run on CI build) so must be kept disabled
+ * It has two running modes (load a DB and test loaded data) defined by the property LOAD_DB
  *
  * Requires the loinc-full resource directory to contain the following files:
  * _ Loinc_1.11.zip
  * _ v1.11_loincupload.properties
  *
- * but last one is too large for the repo, so before running this test, copy it from:
- * https://drive.google.com/drive/folders/18be2R5IurlWnugkl18nDG7wrwPsOtfR-?usp=sharing
+ * but first one is too large for the repo, so before running this test, copy it from:
+ * <a href="https://drive.google.com/drive/folders/18be2R5IurlWnugkl18nDG7wrwPsOtfR-?usp=sharing">here</a>
  * (SmileCDR has access)
  *
- * Can be executed with Lucene or Elastic configuration
+ * Can be executed with Lucene, Elastic or no FT configuration
  *
  */
 @Disabled("Sandbox test")
 @ExtendWith(SpringExtension.class)
 @ContextConfiguration(classes = {
     LoincFullLoadR4SandboxIT.NoopMandatoryTransactionListener.class

+    // one of the following needs to be present
+    // TestR4Config.class // uses in-memory DB
     ,LoincFullLoadR4SandboxIT.OverriddenR4Config.class // your configured persistent DB

-    // pick up elastic, lucene or no-full-text engine:
+    // pick up elastic or lucene engine:
     ,TestHSearchAddInConfig.NoFT.class
 })
 public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
     private static final Logger ourLog = LoggerFactory.getLogger(LoincFullLoadR4SandboxIT.class);

+    private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###");
+
     public static final boolean USE_REAL_DB = true;
     public static final boolean LOAD_DB = false;
-    public static final String DB_NAME = "cdr_loinc_display";
+    public static final String DB_NAME = "testDB_mapto";

-    private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###");

     public static final String LOINC_URL = "http://loinc.org";
     public static final String TEST_FILES_CLASSPATH = "loinc-full/";

@@ -133,11 +141,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {


     // -----------------------------------------------------------------------------------------
-    // full LOINC file 1.11 Initially cloned from 2.73 for tests, with custom lonc.xml file with added 24 new properties
-    // Note that internal defined version is 2.78
+    // full LOINC file 1.11 (initially cloned from 2.73 for tests, with custom lonc.xml file with added 24 new properties)

-    // public static final String CS_VERSION = "1.11";
-    public static final String CS_VERSION = "2.78";
+    public static final String CS_VERSION = "1.11";
     public static final int CS_CONCEPTS_COUNT = 234_390;
     public static final int ASSOCIATED_OBSERVATIONS_COUNT = 8_058;
     public static final int ASK_AT_ORDER_ENTRY_COUNT = 65;
@@ -158,7 +164,6 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
     @Autowired private EntityManager myEntityManager;
     @Autowired private TermLoaderSvcImpl myTermLoaderSvc;
     @Autowired private ITermConceptDao myTermConceptDao;
-    @Autowired private ITermReadSvc myTermReadSvc;
     @Autowired private ITermDeferredStorageSvc myTerminologyDeferredStorageSvc;
     @Autowired private ITermCodeSystemDao myTermCodeSystemDao;
     @Autowired private ITermCodeSystemVersionDao myTermCodeSystemVersionDao;
@@ -171,10 +176,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {

     private int associatedObservationsCount = 0;
     private int askAtOrderEntryCount = 0;
-    private int processedPropertiesCounter = 0;
-    private static List<String> recordPropertyNames;
-    private static List<String> newRecordPropertyNames = List.of(
+    private int validatedPropertiesCounter = 0;
+    private int validatedMapToEntriesCounter = 0;
+    private final static List<String> newRecordPropertyNames = List.of(
         "CHNG_TYPE",
         "DefinitionDescription",
         "CONSUMER_NAME",
@@ -201,6 +205,16 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
         "ValidHL7AttachmentRequest"
     );

+
+    @BeforeEach
+    void setUp() {
+        if (LOAD_DB) {
+            // real load requires longer time than allowed for unit tests
+            myTerminologyDeferredStorageSvc.disallowDeferredTaskTimeout();
+        }
+    }
+
+
     @Test()
     public void uploadLoincCodeSystem() throws Exception {

@@ -214,7 +228,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {

         // save all deferred concepts, properties, links, etc
         sw.restart();
-        saveAllDeferredNoTimeout();
+        myTerminologyDeferredStorageSvc.saveAllDeferred();

         ourLog.info("=================> Saving all terminology deferred entities took {}", sw);
         validateSavedConceptsCount();
@@ -234,13 +248,14 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
         // query each code and validate that all properties in both maps are set

         List<Map<String, String>> conceptPropertyCvsMap = readLoincCsvRecordsAsMap();
-        Multimap<String, Pair<String, String>> conceptMapToCvsMap = ArrayListMultimap.create();
+        Multimap<String, Pair<String, String>> conceptMapToCvsMap = readMapToCsvRecordsAsMap();

         validateCreatedConceptsHaveAllProperties( conceptPropertyCvsMap, conceptMapToCvsMap );

-        ourLog.info("Processed properties : {}", processedPropertiesCounter);
-        ourLog.info("associatedObservationsCount : {}", associatedObservationsCount);
-        ourLog.info("askAtOrderEntryCount : {}", askAtOrderEntryCount);
+        ourLog.info("Validated properties :{}", String.format("%,6d", validatedPropertiesCounter));
+        ourLog.info("Validated MapTo entries :{}", String.format("%,6d", validatedMapToEntriesCounter));
+        ourLog.info("associatedObservationsCount :{}", String.format("%,6d", associatedObservationsCount));
+        ourLog.info("askAtOrderEntryCount :{}", String.format("%,6d", askAtOrderEntryCount));
         ourLog.info("");

         assertEquals(ASK_AT_ORDER_ENTRY_COUNT, askAtOrderEntryCount);
@@ -252,50 +267,22 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
     }


-    private void saveAllDeferredNoTimeout() {
-        while( ! myTerminologyDeferredStorageSvc.isStorageQueueEmpty() ) {
-            myTerminologyDeferredStorageSvc.saveDeferred();
-        }
-    }
-
     /**
-     * Used occasionally for some manual validation - don't delete
+     * Calls validators for each TC for the code in each record in theConceptPropertyInputMap.
+     * @param theConceptPropertyInputMap records in loinc.csv input file mapped by propertyName -> propertyValue
+     * @param theConceptMapToCvsMap records in MapTo.csv input file mapped by TC-code -> List of Pair (value, display)
      */
-    private void queryForSpecificValueSet() {
-        runInTransaction(() -> {
-            Query q = myEntityManager.createQuery("from ForcedId where myForcedId like 'LG8749-6%'");
-            @SuppressWarnings("unchecked")
-            List<ForcedId> fIds = (List<ForcedId>) q.getResultList();
-            long res_id = fIds.stream().map(ForcedId::getId).sorted().findFirst().orElse(fail("ForcedId not found"));
-
-            Query q1 = myEntityManager.createQuery("from ResourceTable where id = " + res_id);
-            @SuppressWarnings("unchecked")
-            List<ResourceTable> vsList = (List<ResourceTable>) q1.getResultList();
-            assertEquals(1, vsList.size());
-            long vsLongId = vsList.get(0).getId();
-            ValueSet vs = (ValueSet) myValueSetDao.toResource( vsList.get(0), false );
-            assertNotNull(vs);
-
-            Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId);
-            @SuppressWarnings("unchecked")
-            List<TermValueSet> tvsList = (List<TermValueSet>) q2.getResultList();
-            assertEquals(1, tvsList.size());
-
-            TermValueSet termValueSet = tvsList.get(0);
-        });
-    }
-
-
     private void validateCreatedConceptsHaveAllProperties(List<Map<String, String>> theConceptPropertyInputMap,
             Multimap<String, Pair<String, String>> theConceptMapToCvsMap) {

         TermCodeSystemVersion tcsVersion = getTermCodeSystemVersion();

-        ourLog.info("Properties to process: {}", ourDecimalFormat.format(theConceptPropertyInputMap.size()));
+        ourLog.info("Properties to validate: {}", ourDecimalFormat.format(theConceptPropertyInputMap.size()));

         for (Map<String, String> tcRecordMap : theConceptPropertyInputMap) {
             String recordCode = getRecordCode(tcRecordMap);
-            processedPropertiesCounter++;
+            validatedPropertiesCounter++;

             runInTransaction(() -> {
                 Optional<TermConcept> tcFomDbOpt = myTermConceptDao.findByCodeSystemAndCode(tcsVersion, recordCode);
@@ -304,8 +291,8 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
                     () -> ourLog.error("Couldn't find TermConcept with code: {} in DB", recordCode));
             });

-            if (processedPropertiesCounter % 10_000 == 0) {
-                ourLog.info("Processed properties: {}", ourDecimalFormat.format(processedPropertiesCounter));
+            if (validatedPropertiesCounter % 10_000 == 0) {
+                ourLog.info("Validated properties: {}", ourDecimalFormat.format(validatedPropertiesCounter));
             }
         }
         ourLog.info("");
@@ -320,8 +307,17 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
     }


+    /**
+     * For received TC:
+     * _ validates that code is same as record code
+     * _ calls mew properties validator
+     * _ calls MapTo properties validator
+     * @param theTermConcept the TermConcept to validate
+     * @param theRecordMap the map of propName -> propValue of all defined input properties for TC
+     * @param theConceptMapToCvsMap the map of TC-code -> List of pair (value, display) for each property defined in input MapTo.csv file
+     */
     private void validateTermConceptEntry(TermConcept theTermConcept,
             Map<String, String> theRecordMap, Multimap<String, Pair<String, String>> theConceptMapToCvsMap) {

         String recordCode = getRecordCode(theRecordMap);
         if ( ! theTermConcept.getCode().equals(recordCode) ) {
|
@@ -329,21 +325,53 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
         }

         ourLog.trace("Validating new properties for TC with code: {}", theTermConcept.getCode());
-        // map of TC property name | set of property values
-        HashMap<String, Set<String>> tcConceptPropertyMap = theTermConcept.getProperties().stream()
+        // map of TC property name -> pair(value, display)
+        HashMap<String, Set<Pair<String, String>>> tcCodeValueDisplayMap = theTermConcept.getProperties().stream()
             .collect(Collectors.groupingBy(TermConceptProperty::getKey,
                 HashMap::new,
-                mapping(TermConceptProperty::getValue, toSet())));
+                mapping(tcp -> Pair.of(tcp.getValue(), tcp.getDisplay()), toSet())));

-        validateNewProperties(theTermConcept, theRecordMap, tcConceptPropertyMap);
+        validateNewProperties(theTermConcept, theRecordMap, tcCodeValueDisplayMap);

         Collection<Pair<String, String>> toMapRecordForTermConcept = theConceptMapToCvsMap.get(recordCode);
-        // validateMapToProperties(recordCode, tcConceptPropertyMap, toMapRecordForTermConcept);
+        validateMapToProperties(recordCode, tcCodeValueDisplayMap, toMapRecordForTermConcept);
     }


+    /**
+     * For each received TC which has a MapTo record, validates that:
+     * _ each record property generated a MAP_TO value property in TC
+     * _ each not-null display property was set as the MAP_TO display property in TC
+     * @param theRecordCode key code of the record and TC
+     * @param theTcConceptPropertyMap map of propName -> pair(value, display) from input MapTo.csv (display can be null)
+     * @param theToMapRecordForTC the collection af MAP_TO property value-display pairs (display can be null)
+     */
+    private void validateMapToProperties(String theRecordCode,
+            HashMap<String, Set<Pair<String, String>>> theTcConceptPropertyMap,
+            Collection<Pair<String, String>> theToMapRecordForTC) {
+
+        if (CollectionUtils.isEmpty(theToMapRecordForTC)) { return; } // no MapTo record for this TermConcept
+
+        ourLog.trace("Validating MapTo properties for TC with code: {}", theRecordCode);
+
+        Set<Pair<String, String>> tcConceptProps = theTcConceptPropertyMap.get("MAP_TO");
+        HashSet<Pair<String, String>> theToMapRecordForTCAsSet = new HashSet<>(theToMapRecordForTC);
+
+        mapToAsserts.add( () -> assertEquals(tcConceptProps, theToMapRecordForTCAsSet, "TermConcept for code: '" +
+            theRecordCode + "' 'MAP_TO' properties don't match MapTo.csv file properties") );
+
+        validatedMapToEntriesCounter++;
+    }
+
+
+    /**
+     *
+     * @param theTermConcept the TermConcept to validate
+     * @param theRecordPropsMap map of propName -> pair(value, display) from input MapTo.csv (display is nullable)
+     * @param theTcConceptPropertyMap the map propName -> Pair(value, display) of TC (display is nullable)
+     */
     private void validateNewProperties(TermConcept theTermConcept, Map<String, String> theRecordPropsMap,
-            HashMap<String, Set<String>> theTcConceptPropertyMap) {
+            HashMap<String, Set<Pair<String, String>>> theTcConceptPropertyMap) {

         // make sure we are good so far and both entries to compare are for same TermConcept code
         assertEquals(theTermConcept.getCode(), theRecordPropsMap.get("LOINC_NUM"), "theTcCode and record key (LOINC_NUM) must match");
@@ -356,60 +384,72 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
             // bypass old properties
             if ( ! newRecordPropertyNames.contains(recordEntry.getKey()) ) { continue; }

-            Set<String> tcConceptValues = theTcConceptPropertyMap.get(recordEntry.getKey());
+            Set<Pair<String, String>> tcPropsValueDisplay = theTcConceptPropertyMap.get(recordEntry.getKey());

             if ( ASSOCIATED_OBSERVATIONS_PROP_NAME.equals(recordEntry.getKey()) ) {
                 associatedObservationsCount++;
-                validateCodingProperties(theTermConcept, ASSOCIATED_OBSERVATIONS_PROP_NAME, recordEntry, tcConceptValues);
+                validateCodingProperties(theTermConcept, recordEntry.getKey(), recordEntry, tcPropsValueDisplay);
                 continue;
             }

             if ( ASK_AT_ORDER_ENTRY_PROP_NAME.equals(recordEntry.getKey()) ) {
                 askAtOrderEntryCount++;
-                validateCodingProperties(theTermConcept, ASK_AT_ORDER_ENTRY_PROP_NAME, recordEntry, tcConceptValues);
+                validateCodingProperties(theTermConcept, recordEntry.getKey(), recordEntry, tcPropsValueDisplay);
                 continue;
             }

-            if (CollectionUtils.isEmpty(tcConceptValues)) {
-                ourLog.error("TermConcept with code: {} does not have property: {} which in csv file has value: {}",
-                    theTermConcept.getCode(), recordEntry.getKey(), recordEntry.getValue());
+            assertEquals(1, tcPropsValueDisplay.size(), "TermConcept with code: {} was expected to have 1 property " +
+                "with key: " + recordEntry.getKey() + " and value: " + recordEntry.getValue() + " but has: " + tcPropsValueDisplay.size() + " instead." );
+
+            String tcPropValue = tcPropsValueDisplay.iterator().next().getLeft();
+            if ( ! recordEntry.getValue().equals(tcPropValue) ) {
+                ourLog.error("TermConcept with code: {} property: {} expected value: {}, found value: {}",
+                    theTermConcept.getCode(), recordEntry.getKey(), recordEntry.getValue(), tcPropValue);
             }
         }
     }


     /**
-     * Validate that all file property codes become a "Coding" property on the TermConcept
+     * Validate that all file CODING properties become a "Coding" property on the TermConcept
      * and display properties are the display of the target TermConcept
      */
-    private void validateCodingProperties(TermConcept theSourceTermConcept, String thePropName,
-            Map.Entry<String, String> recordEntry, Set<String> theTCPropValues) {
+    private void validateCodingProperties(TermConcept theTermConcept, String thePropName,
+            Map.Entry<String, String> recordEntry, Set<Pair<String, String>> theTCPropValueDisplaySet) {

         List<String> recordPropertyCodes = parsePropertyCodeValues(recordEntry.getValue());

-        // validate each property in the records was uploaded to the corresponding TermConcept
-        for (String recordPropertyCode : recordPropertyCodes) {
-            if ( ! theTCPropValues.contains(recordPropertyCode) ) {
-                ourLog.error("For TC code: {}, prop: {}, record code: {} not found among uploaded TC properties: {}",
-                    theSourceTermConcept.getCode(), recordEntry.getKey(), recordPropertyCode, String.join(" - ", theTCPropValues));
-            }
+        // validate that each property value in the records was uploaded to the corresponding TermConcept
+        List<String> tcPropValues = theTCPropValueDisplaySet.stream().map(Pair::getLeft).collect(Collectors.toList());
+        checkCodeSetsEqual(recordPropertyCodes, tcPropValues, theTermConcept.getCode(), thePropName);

-            // validate that the display value for each uploaded TC property of name thePropertyName is the display of the TC pointed by the TC code
-            validatePropertiesDisplay(theSourceTermConcept, thePropName, recordPropertyCode);
+        // validate that the display value for each uploaded TC property of name thePropName is the display of the TC pointed by the property code
+        validatePropertyDisplays(theTermConcept, thePropName);
+    }
+
+    private void checkCodeSetsEqual(List<String> theExpectedCodes, List<String> theCreatedCodes, String theTcCode, String thePropName) {
+        if (theExpectedCodes.equals(theCreatedCodes)) return;
+
+        // inform each expected code not present in TC
+        for (String recordPropertyCode : theExpectedCodes) {
+            if ( ! theCreatedCodes.contains(recordPropertyCode) ) {
+                ourLog.error("For TC code: {}, prop: {}, record code: {} not found among uploaded TC properties: {}",
+                    theTcCode, thePropName, recordPropertyCode, theCreatedCodes);
+            }
         }

-        // also check that uploaded TC only has properties is has to have
-        for (String tcPropValue : theTCPropValues) {
-            if ( ! recordEntry.getValue().contains(tcPropValue)) {
-                ourLog.error("TC with code: {}, has a property with code: {}, which is not in defined property list: {}",
-                    theSourceTermConcept.getCode(), tcPropValue, recordEntry.getValue());
+        // inform each TC code not present in expected
+        for (String tcPropertyCode : theCreatedCodes) {
+            if ( ! theExpectedCodes.contains(tcPropertyCode) ) {
+                ourLog.error("TC with code: {}, prop: {}, TC code: {} not found among record properties: {}",
+                    theTcCode, thePropName, theCreatedCodes, tcPropertyCode);
             }
         }
     }


-    private void validatePropertiesDisplay(TermConcept theSourceTermConcept, String thePropName, String recordPropertyCode) {
-        // from source TermConcept obtain the map of thePropName properties: property code - display
+    private void validatePropertyDisplays(TermConcept theSourceTermConcept, String thePropName) {
+        // from TermConcept obtain the map of thePropName properties: property code - display
         Map<String, String> srcTcCodeDisplayMap = theSourceTermConcept.getProperties().stream()
             .filter(p -> p.getKey().equals(thePropName))
             .collect(Collectors.toMap(TermConceptProperty::getValue, TermConceptProperty::getDisplay));
@@ -418,13 +458,14 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
             Optional<TermConcept> targetTermConceptOpt =
                 myTermConceptDao.findByCodeSystemAndCode(termCodeSystemVersion, tcCodeDisplayEntry.getKey());
             if (targetTermConceptOpt.isEmpty()) {
-                ourLog.error("For TC code: {}, target TC with code: {} is not present in DB", theSourceTermConcept.getCode(), recordPropertyCode);
-            }
+                ourLog.error("For TC code: {}, target TC with code: {} is not present in DB",
+                    theSourceTermConcept.getCode(), tcCodeDisplayEntry.getKey());
+            } else {
                 TermConcept targetTermConcept = targetTermConceptOpt.get();
                 if ( ! tcCodeDisplayEntry.getValue().equals(targetTermConcept.getDisplay()) ) {
                     ourLog.error("For TC with code: {}, display is: {}, while target TC display is: {}",
                         theSourceTermConcept.getCode(), tcCodeDisplayEntry.getValue(), targetTermConcept.getDisplay());
                 }
+            }
         }
     }
@@ -438,14 +479,9 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {


     private List<Map<String, String>> readLoincCsvRecordsAsMap() throws Exception {
-        CSVParser parser = getParserForZipFile(LOINC_ZIP_CLASSPATH, LOINC_CSV_ZIP_ENTRY_PATH);
+        CSVParser parser = getParserForZipFile(LOINC_CSV_ZIP_ENTRY_PATH);
         Iterator<CSVRecord> iter = parser.iterator();

-        Map<String, Integer> headerMap = parser.getHeaderMap();
-        recordPropertyNames = headerMap.entrySet().stream()
-            .sorted(Comparator.comparingInt(Map.Entry::getValue))
-            .map(Map.Entry::getKey)
-            .collect(Collectors.toList());
         ourLog.debug("Header map: {}", parser.getHeaderMap());

         int count = 0;
@@ -461,14 +497,43 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
             records.add( nextRecord.toMap() );
             count++;
         }
-        ourLog.info("Read and mapped {} csv file lines", count);
+        ourLog.info("Read and mapped {} {} file lines", ourDecimalFormat.format(count), LOINC_CSV_ZIP_ENTRY_PATH);
         return records;
     }

+
+    private Multimap<String, Pair<String, String>> readMapToCsvRecordsAsMap() throws Exception {
+        CSVParser parser = getParserForZipFile(LOINC_MAP_TO_ZIP_ENTRY_PATH);
+
+        ourLog.debug("Header map: {}", parser.getHeaderMap());
+
+        Multimap<String, Pair<String, String>> records = ArrayListMultimap.create();
+        int count = 0;
+
+        for (CSVRecord nextRecord : parser) {
+            if (!nextRecord.isConsistent()) {
+                ourLog.error("Inconsistent record");
+                continue;
+            }
+            String code = nextRecord.get(LoincMapToHandler.CONCEPT_CODE_PROP_NAME);
+            assertNotNull(code, "MapTo record with blank '" + LoincMapToHandler.CONCEPT_CODE_PROP_NAME + "' field: " + nextRecord);
+            String toValue = nextRecord.get(LoincMapToHandler.MAP_TO_PROP_NAME);
+            assertNotNull(code, "MapTo record with blank '" + LoincMapToHandler.MAP_TO_PROP_NAME + "' field: " + nextRecord);
+
+            records.put(code, Pair.of(toValue, nextRecord.get(LoincMapToHandler.DISPLAY_PROP_NAME)));
+            count++;
+        }
+        ourLog.info("Read and mapped {} {} file lines into {} map entries", ourDecimalFormat.format(count),
+            LOINC_MAP_TO_ZIP_ENTRY_PATH, ourDecimalFormat.format(records.asMap().size()));
+        return records;
+    }
+
+
     @Nonnull
-    private CSVParser getParserForZipFile(String theZipFileClassPath, String theFileEntryPath) throws Exception {
-        Reader reader = new StringReader(getCvsStringFromZip(theZipFileClassPath, theFileEntryPath));
+    private CSVParser getParserForZipFile(String theFileEntryPath) throws Exception {
+        Reader reader = new StringReader(getCvsStringFromZip(LOINC_ZIP_CLASSPATH, theFileEntryPath));

         CSVFormat format = CSVFormat
             .newFormat(',')
@@ -501,7 +566,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
         int dbVersionedTermConceptCount = runInTransaction(() ->
             myTermConceptDao.countByCodeSystemVersion(tcsvId) );
         ourLog.info("=================> Number of stored concepts for version {}: {}",
-            CS_VERSION, ourDecimalFormat.format(dbVersionedTermConceptCount));
+            CS_VERSION, ourDecimalFormat.format(dbVersionedTermConceptCount) );
         assertEquals(CS_CONCEPTS_COUNT, dbVersionedTermConceptCount);
     }

@@ -534,6 +599,34 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
     }


+    /**
+     * Used occasionally for some manual validation - don't delete
+     */
+    private void queryForSpecificValueSet() {
+        runInTransaction(() -> {
+            Query q = myEntityManager.createQuery("from ForcedId where myForcedId like 'LG8749-6%'");
+            @SuppressWarnings("unchecked")
+            List<ForcedId> fIds = (List<ForcedId>) q.getResultList();
+            long res_id = fIds.stream().map(ForcedId::getId).sorted().findFirst().orElse(fail("ForcedId not found"));
+
+            Query q1 = myEntityManager.createQuery("from ResourceTable where id = " + res_id);
+            @SuppressWarnings("unchecked")
+            List<ResourceTable> vsList = (List<ResourceTable>) q1.getResultList();
+            assertEquals(1, vsList.size());
+            long vsLongId = vsList.get(0).getId();
+            ValueSet vs = (ValueSet) myValueSetDao.toResource( vsList.get(0), false );
+            assertNotNull(vs);
+
+            Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId);
+            @SuppressWarnings("unchecked")
+            List<TermValueSet> tvsList = (List<TermValueSet>) q2.getResultList();
+            assertEquals(1, tvsList.size());
+
+            TermValueSet termValueSet = tvsList.get(0);
+        });
+    }
+
+
     // List of all columns in Loinc.csv input file
     // private static final String[] recordFieldNames = {
     //     "LOINC_NUM"
@@ -1,10 +1,8 @@
-LoincFullLoadR4SandboxIT requires this directory must contain the following
-three files:
-_ Loinc.csv.gz
-_ Loinc_1.11.zip and
+LoincFullLoadR4SandboxIT requires this directory must contain the following files:
+_ Loinc_1.11.zip
 _ v1.11_loincupload.properties

-but those files are too large for the repo, so before running this test you need to copy them from:
+but zip file is too large for the repo, so before running this test you need to copy it from:
 https://drive.google.com/drive/folders/18be2R5IurlWnugkl18nDG7wrwPsOtfR-?usp=sharing
 (SmileCDR has access)
