From 6f84d17b13d6b974f1d973465a005bdb7bbbe4ee Mon Sep 17 00:00:00 2001
From: Martha Mitran
Date: Mon, 20 Nov 2023 10:23:37 -0800
Subject: [PATCH] Expanding ValueSet using hierarchical CodeSystem fails with
 constraint violation exception (#5461)

* Expanding a ValueSet using a hierarchical CodeSystem would fail in different
  scenarios with a constraint violation exception when codes (term concepts)
  were being persisted

* Small changes after code review. Doing some variable/method renaming and
  correcting changelog Jira issue number.
---
 ...lizer_fails_capabilitystatement_dstu2.yaml |   2 +-
 ...valueset-with-hierarchical-codesystem.yaml |   6 +
 .../ca/uhn/fhir/jpa/term/TermReadSvcImpl.java |  91 ++++++++-------
 .../jpa/term/ValueSetExpansionR4Test.java     |   9 +-
 .../ValueSetExpansionWithHierarchyR4Test.java | 109 ++++++++++++++++++
 5 files changed, 167 insertions(+), 50 deletions(-)
 create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5460-failed-to-expand-valueset-with-hierarchical-codesystem.yaml
 create mode 100644 hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionWithHierarchyR4Test.java

diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_0/5431-version_canonicalizer_fails_capabilitystatement_dstu2.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_0/5431-version_canonicalizer_fails_capabilitystatement_dstu2.yaml
index 771291f525e..2847a698b16 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_0/5431-version_canonicalizer_fails_capabilitystatement_dstu2.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_10_0/5431-version_canonicalizer_fails_capabilitystatement_dstu2.yaml
@@ -1,5 +1,5 @@
 ---
 type: fix
 issue: 5431
-jira: SMILE-5306
+jira: SMILE-7589
 title: "Previously, using VersionCanonicalizer to convert a CapabilityStatement from R5 to DSTU2 would fail. This is now fixed."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5460-failed-to-expand-valueset-with-hierarchical-codesystem.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5460-failed-to-expand-valueset-with-hierarchical-codesystem.yaml
new file mode 100644
index 00000000000..704cadfb039
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5460-failed-to-expand-valueset-with-hierarchical-codesystem.yaml
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5460
+jira: SMILE-7594
+title: "Previously, expanding a ValueSet using a hierarchical CodeSystem would fail in different scenarios
+with a constraint violation exception when codes (term concepts) were being persisted. This is now fixed."
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
index 447e3058c4d..73f95569ba6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
@@ -196,14 +196,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
     public static final int DEFAULT_MASS_INDEXER_OBJECT_LOADING_THREADS = 2;
     // doesn't seem to be much gain by using more threads than this value
     public static final int MAX_MASS_INDEXER_OBJECT_LOADING_THREADS = 6;
-    private static final int SINGLE_FETCH_SIZE = 1;
     private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermReadSvcImpl.class);
     private static final ValueSetExpansionOptions DEFAULT_EXPANSION_OPTIONS = new ValueSetExpansionOptions();
     private static final TermCodeSystemVersionDetails NO_CURRENT_VERSION = new TermCodeSystemVersionDetails(-1L, null);
-    private static final String IDX_PROPERTIES = "myProperties";
-    private static final String IDX_PROP_KEY = IDX_PROPERTIES + ".myKey";
-    private static final String IDX_PROP_VALUE_STRING = IDX_PROPERTIES + ".myValueString";
-    private static final String IDX_PROP_DISPLAY_STRING = IDX_PROPERTIES + ".myDisplayString";
     private static final String OUR_PIPE_CHARACTER = "|";
     private static final int SECONDS_IN_MINUTE = 60;
     private static final int INDEXED_ROOTS_LOGGING_COUNT = 50_000;
@@ -540,6 +535,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             @Nullable ValueSetExpansionOptions theExpansionOptions,
             ValueSet theValueSetToExpand,
             ExpansionFilter theFilter) {
+        Set<String> addedCodes = new HashSet<>();
         ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null");

         ValueSetExpansionOptions expansionOptions = provideExpansionOptions(theExpansionOptions);
@@ -560,9 +556,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             accumulator.addParameter().setName("count").setValue(new IntegerType(count));
         }

-        myTxTemplate.executeWithoutResult(tx -> {
-            expandValueSetIntoAccumulator(theValueSetToExpand, theExpansionOptions, accumulator, theFilter, true);
-        });
+        myTxTemplate.executeWithoutResult(tx -> expandValueSetIntoAccumulator(
+                theValueSetToExpand, theExpansionOptions, accumulator, theFilter, true, addedCodes));

         if (accumulator.getTotalConcepts() != null) {
             accumulator.setTotal(accumulator.getTotalConcepts());
@@ -594,7 +589,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             ValueSetExpansionOptions theExpansionOptions,
             IValueSetConceptAccumulator theAccumulator,
             ExpansionFilter theFilter,
-            boolean theAdd) {
+            boolean theAdd,
+            Set<String> theAddedCodes) {
         Optional<TermValueSet> optionalTermValueSet;
         if (theValueSetToExpand.hasUrl()) {
             if (theValueSetToExpand.hasVersion()) {
@@ -621,7 +617,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                             "valueSetExpandedUsingInMemoryExpansion",
                             getValueSetInfo(theValueSetToExpand));
             theAccumulator.addMessage(msg);
-            doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
+            doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter, theAddedCodes);
             return;
         }

@@ -639,7 +635,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                             termValueSet.getExpansionStatus().name(),
                             termValueSet.getExpansionStatus().getDescription());
             theAccumulator.addMessage(msg);
-            doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
+            doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter, theAddedCodes);
             return;
         }

@@ -651,7 +647,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                 .getLocalizer()
                 .getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
         theAccumulator.addMessage(msg);
-        expandConcepts(theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, isOracleDialect());
+        expandConcepts(
+                theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, theAddedCodes, isOracleDialect());
     }

     @Nonnull
@@ -676,6 +673,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             TermValueSet theTermValueSet,
             ExpansionFilter theFilter,
             boolean theAdd,
+            Set<String> theAddedCodes,
             boolean theOracle) {
         // NOTE: if you modifiy the logic here, look to `expandConceptsOracle` and see if your new code applies to its
         // copy pasted sibling
@@ -708,10 +706,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             }
             wasFilteredResult = true;
         } else {
-            // TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching
-            // index 3 -> 10 here, grabbing 7 concepts.
-            // Specifically this test
-            // testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange
            if (theOracle) {
                 conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(
                         offset, toIndex, theTermValueSet.getId());
@@ -802,26 +796,27 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                 Long sourceConceptPid = pidToSourcePid.get(nextPid);
                 String sourceConceptDirectParentPids = pidToSourceDirectParentPids.get(nextPid);

-                theAccumulator.includeConceptWithDesignations(
-                        system,
-                        code,
-                        display,
-                        designations,
-                        sourceConceptPid,
-                        sourceConceptDirectParentPids,
-                        systemVersion);
+                if (theAddedCodes.add(system + OUR_PIPE_CHARACTER + code)) {
+                    theAccumulator.includeConceptWithDesignations(
+                            system,
+                            code,
+                            display,
+                            designations,
+                            sourceConceptPid,
+                            sourceConceptDirectParentPids,
+                            systemVersion);
+                    if (wasFilteredResult) {
+                        theAccumulator.incrementOrDecrementTotalConcepts(true, 1);
+                    }
+                }
             } else {
-                boolean removed = theAccumulator.excludeConcept(system, code);
-                if (removed) {
+                if (theAddedCodes.remove(system + OUR_PIPE_CHARACTER + code)) {
+                    theAccumulator.excludeConcept(system, code);
                     theAccumulator.incrementOrDecrementTotalConcepts(false, 1);
                 }
             }
         }

-        if (wasFilteredResult && theAdd) {
-            theAccumulator.incrementOrDecrementTotalConcepts(true, pidToConcept.size());
-        }
-
         logDesignationsExpanded("Finished expanding designations. ", theTermValueSet, designationsExpanded);
         logConceptsExpanded("Finished expanding concepts. ", theTermValueSet, conceptsExpanded);
     }
@@ -886,8 +881,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             ValueSetExpansionOptions theExpansionOptions,
             ValueSet theValueSetToExpand,
             IValueSetConceptAccumulator theValueSetCodeAccumulator) {
+        Set<String> addedCodes = new HashSet<>();
         doExpandValueSet(
-                theExpansionOptions, theValueSetToExpand, theValueSetCodeAccumulator, ExpansionFilter.NO_FILTER);
+                theExpansionOptions,
+                theValueSetToExpand,
+                theValueSetCodeAccumulator,
+                ExpansionFilter.NO_FILTER,
+                addedCodes);
     }

     /**
@@ -898,8 +898,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             ValueSetExpansionOptions theExpansionOptions,
             ValueSet theValueSetToExpand,
             IValueSetConceptAccumulator theValueSetCodeAccumulator,
-            @Nonnull ExpansionFilter theExpansionFilter) {
-        Set<String> addedCodes = new HashSet<>();
+            @Nonnull ExpansionFilter theExpansionFilter,
+            Set<String> theAddedCodes) {
         StopWatch sw = new StopWatch();
         String valueSetInfo = getValueSetInfo(theValueSetToExpand);

@@ -908,7 +908,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
         // Offset can't be combined with excludes
         Integer skipCountRemaining = theValueSetCodeAccumulator.getSkipCountRemaining();
         if (skipCountRemaining != null && skipCountRemaining > 0) {
-            if (theValueSetToExpand.getCompose().getExclude().size() > 0) {
+            if (!theValueSetToExpand.getCompose().getExclude().isEmpty()) {
                 String msg = myContext
                         .getLocalizer()
                         .getMessage(TermReadSvcImpl.class, "valueSetNotYetExpanded_OffsetNotAllowed", valueSetInfo);
@@ -921,7 +921,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
         for (ValueSet.ConceptSetComponent include :
                 theValueSetToExpand.getCompose().getInclude()) {
             myTxTemplate.executeWithoutResult(tx -> expandValueSetHandleIncludeOrExclude(
-                    theExpansionOptions, theValueSetCodeAccumulator, addedCodes, include, true, theExpansionFilter));
+                    theExpansionOptions, theValueSetCodeAccumulator, theAddedCodes, include, true, theExpansionFilter));
         }

         // Handle excludes
@@ -931,7 +931,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             myTxTemplate.executeWithoutResult(tx -> expandValueSetHandleIncludeOrExclude(
                     theExpansionOptions,
                     theValueSetCodeAccumulator,
-                    addedCodes,
+                    theAddedCodes,
                     exclude,
                     false,
                     ExpansionFilter.NO_FILTER));
@@ -976,7 +976,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {

         String system = theIncludeOrExclude.getSystem();
         boolean hasSystem = isNotBlank(system);
-        boolean hasValueSet = theIncludeOrExclude.getValueSet().size() > 0;
+        boolean hasValueSet = !theIncludeOrExclude.getValueSet().isEmpty();

         if (hasSystem) {

@@ -1004,7 +1004,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {

         } else {

-            if (theIncludeOrExclude.getConcept().size() > 0 && theExpansionFilter.hasCode()) {
+            if (!theIncludeOrExclude.getConcept().isEmpty() && theExpansionFilter.hasCode()) {
                 if (defaultString(theIncludeOrExclude.getSystem()).equals(theExpansionFilter.getSystem())) {
                     if (theIncludeOrExclude.getConcept().stream()
                             .noneMatch(t -> t.getCode().equals(theExpansionFilter.getCode()))) {
@@ -1065,7 +1065,12 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                 }

                 expandValueSetIntoAccumulator(
-                        valueSet, theExpansionOptions, theValueSetCodeAccumulator, subExpansionFilter, theAdd);
+                        valueSet,
+                        theExpansionOptions,
+                        theValueSetCodeAccumulator,
+                        subExpansionFilter,
+                        theAdd,
+                        theAddedCodes);
             }

         } else {
@@ -1855,12 +1860,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
             if (!handled) {
                 throwInvalidFilter(
                         nextFilter,
-                        " - Note that Hibernate Search is disabled on this server so not all ValueSet expansion funtionality is available.");
+                        " - Note that Hibernate Search is disabled on this server so not all ValueSet expansion functionality is available.");
             }
         }

-        if (theInclude.getConcept().isEmpty()) {
-
+        if (theInclude.getFilter().isEmpty() && theInclude.getConcept().isEmpty()) {
             Collection<TermConcept> concepts =
                     myConceptDao.fetchConceptsAndDesignationsByVersionPid(theVersion.getPid());
             for (TermConcept next : concepts) {
@@ -2392,7 +2396,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                     sw);
         } catch (Exception e) {
-            ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e);
+            ourLog.error(
+                    "Failed to pre-expand ValueSet with URL[{}]: {}", valueSetToExpand.getUrl(), e.getMessage(), e);
             txTemplate.executeWithoutResult(t -> {
                 valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND);
                 myTermValueSetDao.saveAndFlush(valueSetToExpand);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java
index bc22114e25f..2509c25d18a 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java
@@ -36,7 +36,6 @@ import org.hl7.fhir.r4.model.codesystems.HttpVerb;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.data.domain.Pageable;
@@ -73,9 +72,6 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test {

     private final ValueSetTestUtil myValueSetTestUtil = new ValueSetTestUtil(FhirVersionEnum.R4);

-    @Mock
-    private IValueSetConceptAccumulator myValueSetCodeAccumulator;
-
     @AfterEach
     public void afterEach() {
         SearchBuilder.setMaxPageSize50ForTest(false);
@@ -280,8 +276,9 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test {

         //Ensure that the subsequent expansion with offset returns the same slice we are anticipating.
         assertThat(myValueSetTestUtil.toCodes(expandedValueSet).toString(), myValueSetTestUtil.toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(offset, offset + count))));
-        Assertions.assertEquals(4, expandedValueSet.getExpansion().getContains().size(), myValueSetTestUtil.toCodes(expandedValueSet).toString());
-        assertEquals(11, expandedValueSet.getExpansion().getTotal());
+        Assertions.assertEquals(count, expandedValueSet.getExpansion().getContains().size(), myValueSetTestUtil.toCodes(expandedValueSet).toString());
+        assertEquals(offset + count, expandedValueSet.getExpansion().getTotal());
+        assertEquals(count, expandedValueSet.getExpansion().getContains().size());

         // Make sure we used the pre-expanded version
         List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueries();
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionWithHierarchyR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionWithHierarchyR4Test.java
new file mode 100644
index 00000000000..a03e78028b9
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionWithHierarchyR4Test.java
@@ -0,0 +1,109 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.entity.TermValueSet;
+import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ValueSet;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.params.provider.Arguments.arguments;
+
+public class ValueSetExpansionWithHierarchyR4Test extends BaseTermR4Test {
+    private static final String ourCodeSystemId = "CodeSystem-WithHierarchy", ourCodeSystemUrl = "http://example/" + ourCodeSystemId;
+    private static final String ourCodeA = "CodeA", ourCodeB = "CodeB", ourCodeC = "CodeC", ourCodeD = "CodeD";
+    private static final String ourValueSetAUrl = "http://example/ValueSetA", ourValueSetBUrl = "http://example/ValueSetB", ourValueSetUrl = "http://example/ValueSet";
+    private static final int ourChildConceptCount = 10;
+
+    @BeforeAll
+    public static void setup() {
+        TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true);
+    }
+    @AfterAll
+    public static void tearDown() {
+        TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(false);
+    }
+    @BeforeEach
+    public void setupHierarchicalCodeSystemWithValueSets() {
+        myStorageSettings.setPreExpandValueSets(true);
+
+        CodeSystem codeSystem = new CodeSystem();
+        codeSystem.setId(ourCodeSystemId);
+        codeSystem.setUrl(ourCodeSystemUrl);
+        CodeSystem.ConceptDefinitionComponent concept1 = codeSystem.addConcept().setCode(ourCodeA);
+        CodeSystem.ConceptDefinitionComponent concept2 = codeSystem.addConcept().setCode(ourCodeB);
+        for (int i = 0; i < ourChildConceptCount; i++) {
+            concept1.addConcept().setCode(concept1.getCode() + i);
+            concept2.addConcept().setCode(concept2.getCode() + i);
+        }
+        codeSystem.addConcept().setCode(ourCodeC);
+        codeSystem.addConcept().setCode(ourCodeD);
+        myCodeSystemDao.create(codeSystem, mySrd);
+
+        ValueSet valueSetA = new ValueSet();
+        valueSetA.setUrl(ourValueSetAUrl);
+        valueSetA.getCompose().addInclude().setSystem(ourCodeSystemUrl)
+                .addFilter().setProperty("concept").setOp(ValueSet.FilterOperator.ISA).setValue(ourCodeA);
+        myValueSetDao.create(valueSetA, mySrd);
+
+        ValueSet valueSetB = new ValueSet();
+        valueSetB.setUrl(ourValueSetBUrl);
+        valueSetB.getCompose().addInclude().setSystem(ourCodeSystemUrl)
+                .addFilter().setProperty("concept").setOp(ValueSet.FilterOperator.ISA).setValue(ourCodeB);
+        myValueSetDao.create(valueSetB, mySrd);
+    }
+
+    static Stream<Arguments> parametersValueSets() {
+        ValueSet valueSet0 = new ValueSet();
+        valueSet0.setUrl(ourValueSetUrl + "-WithIncludeChildValueSet");
+        valueSet0.getCompose().addInclude().addValueSet(ourValueSetAUrl);
+
+        ValueSet valueSet1 = new ValueSet();
+        valueSet1.setUrl(ourValueSetUrl + "-WithIncludeChildValueSetAndCodeSystem");
+        valueSet1.getCompose().addInclude().addValueSet(ourValueSetAUrl);
+        valueSet1.getCompose().addInclude().addValueSet(ourValueSetBUrl);
+        valueSet1.getCompose().addInclude().setSystem(ourCodeSystemUrl);
+
+        ValueSet valueSet2 = new ValueSet();
+        valueSet2.setUrl(ourValueSetUrl + "-WithIncludeChildValueSetAndCodeSystemConceptSet-NoIntersectionCodes");
+        valueSet2.getCompose().addInclude().addValueSet(ourValueSetAUrl);
+        ValueSet.ConceptSetComponent conceptSetWithCodeSystem = valueSet2.getCompose().addInclude().setSystem(ourCodeSystemUrl);
+        conceptSetWithCodeSystem.addConcept().setCode(ourCodeC);
+        conceptSetWithCodeSystem.addConcept().setCode(ourCodeD);
+
+        ValueSet valueSet3 = new ValueSet();
+        valueSet3.setUrl(ourValueSetUrl + "-WithIncludeChildValueSetAndCodeSystemConceptSet-WithIntersectionCodes");
+        valueSet3.getCompose().addInclude().addValueSet(ourValueSetAUrl);
+        conceptSetWithCodeSystem = valueSet3.getCompose().addInclude().setSystem(ourCodeSystemUrl);
+        conceptSetWithCodeSystem.addConcept().setCode(ourCodeA + "1");
+        conceptSetWithCodeSystem.addConcept().setCode(ourCodeA + "2");
+
+        return Stream.of(
+                arguments(valueSet0, ourChildConceptCount),
+                arguments(valueSet1, 4 + ourChildConceptCount * 2),
+                arguments(valueSet2, 2 + ourChildConceptCount),
+                arguments(valueSet3, ourChildConceptCount)
+        );
+    }
+
+    @ParameterizedTest
+    @MethodSource(value = "parametersValueSets")
+    public void testExpandValueSet_whenUsingHierarchicalCodeSystem_willExpandSuccessfully(ValueSet theValueSet, int theExpectedConceptExpansionCount) {
+        myValueSetDao.create(theValueSet, mySrd);
+        myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
+        Optional<TermValueSet> optionalTermValueSet = runInTransaction(() -> myTermValueSetDao.findTermValueSetByUrlAndNullVersion(theValueSet.getUrl()));
+        assertTrue(optionalTermValueSet.isPresent());
+        TermValueSet expandedTermValueSet = optionalTermValueSet.get();
+        assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, expandedTermValueSet.getExpansionStatus());
+        assertEquals(theExpectedConceptExpansionCount, expandedTermValueSet.getTotalConcepts());
+    }
+}