Testing Part 3
parent 09ddd964ff
commit 97c11a9be0
TerminologyCache.java
@@ -591,7 +591,7 @@ public class TerminologyCache {
     return s;
   }
 
-  private String hashJson(String s) {
+  protected String hashJson(String s) {
     s = StringUtils.remove(s, ' ');
     s = StringUtils.remove(s, '\n');
     s = StringUtils.remove(s, '\r');

TerminologyCacheTests.java
@@ -2,14 +2,16 @@ package org.hl7.fhir.r5.context;
 
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
-import org.checkerframework.checker.units.qual.C;
+import org.hl7.fhir.r5.formats.IParser;
 import org.hl7.fhir.r5.model.CodeableConcept;
 import org.hl7.fhir.r5.model.Coding;
 import org.hl7.fhir.r5.model.ValueSet;
 import org.hl7.fhir.r5.terminologies.ValueSetExpander;
 import org.hl7.fhir.utilities.validation.ValidationMessage;
-import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 import java.io.File;
 import java.io.IOException;
@@ -17,9 +19,15 @@ import java.net.URISyntaxException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.mock;
 
 public class TerminologyCacheTests {
@@ -32,6 +40,12 @@ public class TerminologyCacheTests {
     return jsonParser.parse(stringValue);
   };
 
+  private TerminologyCache createTerminologyCache() throws IOException {
+    Object lock = new Object();
+    TerminologyCache terminologyCache = new TerminologyCache(lock, null);
+    return terminologyCache;
+  }
+
   public Path createTempCacheDirectory() throws IOException {
     Path tmp = Files.createTempDirectory("integrationTestCache");
     tmp.toFile().deleteOnExit();
@@ -95,9 +109,8 @@ public class TerminologyCacheTests {
 
   @Test
   public void testCodingCacheTokenGeneration() throws IOException, URISyntaxException {
-    Object lock = new Object();
 
-    TerminologyCache terminologyCache = new TerminologyCache(lock, null);
+    TerminologyCache terminologyCache = createTerminologyCache();
     ValueSet valueSet = new ValueSet();
 
     Coding coding = new Coding();
@@ -109,13 +122,13 @@ public class TerminologyCacheTests {
     JsonElement expected = getJsonFromFile("codingEmptyValueSet.json");
 
     assertEquals(expected, actual);
+    assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
   }
 
   @Test
   public void testCodableConceptCacheTokenGeneration() throws IOException, URISyntaxException {
-    Object lock = new Object();
-    TerminologyCache terminologyCache = new TerminologyCache(lock, null);
+    TerminologyCache terminologyCache = createTerminologyCache();
     CodeableConcept concept = new CodeableConcept();
     concept.addCoding(new Coding().setCode("dummyCode"));
     ValueSet valueSet = new ValueSet();
@@ -126,6 +139,112 @@ public class TerminologyCacheTests {
     JsonElement expected = getJsonFromFile("codableConceptEmptyValueSet.json");
 
     assertEquals(expected, actual);
+    assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
+  }
+
+  @Test
+  public void testExpansionToken() throws IOException, URISyntaxException {
+    TerminologyCache terminologyCache = createTerminologyCache();
+    ValueSet valueSet = new ValueSet();
+
+    TerminologyCache.CacheToken expansionToken = terminologyCache.generateExpandToken(valueSet, false);
+    TerminologyCache.CacheToken expansionTokenHierarchical = terminologyCache.generateExpandToken(valueSet, true);
+
+    JsonElement actualExpansion = jsonParser.parse(expansionToken.getRequest());
+    JsonElement expectedExpansion = getJsonFromFile("expansion.json");
+
+    assertEquals(expectedExpansion, actualExpansion);
+
+    JsonElement actualExpansionHierarchical = jsonParser.parse(expansionTokenHierarchical.getRequest());
+    JsonElement expectedExpansionHierarchical = getJsonFromFile("expansionHierarchical.json");
+
+    assertEquals(expectedExpansionHierarchical, actualExpansionHierarchical);
+    assertEquals(terminologyCache.hashJson(expectedExpansion.toString()),
+      terminologyCache.hashJson(actualExpansion.toString()));
+    assertEquals(terminologyCache.hashJson(expectedExpansionHierarchical.toString()),
+      terminologyCache.hashJson(actualExpansionHierarchical.toString()));
+
+  }
+
+  @Test
+  public void testGetVSEssence() throws IOException {
+    ValueSet.ValueSetExpansionParameterComponent vsepc = new ValueSet.ValueSetExpansionParameterComponent().setName("dummyValueSetExpansionParameterComponent");
+
+    ValueSet vs = new ValueSet();
+    vs.getExpansion().setParameter(Arrays.asList(vsepc));
+    vs.getExpansion().setContains(Arrays.asList(new ValueSet.ValueSetExpansionContainsComponent().setCode("dummyVSExpansionContainsComponent")));
+    vs.getExpansion().setIdentifier("dummyIdentifier");
+    vs.getExpansion().setTimestamp(new Date());
+
+    assertTrue(vs.getExpansion().hasIdentifier());
+
+    TerminologyCache cache = createTerminologyCache();
+    ValueSet vse = cache.getVSEssense(vs);
+
+    assertEquals(vs.getExpansion().getParameter(), vse.getExpansion().getParameter());
+    assertEquals(vs.getExpansion().getContains(), vse.getExpansion().getContains());
+
+    assertFalse(vse.getExpansion().hasIdentifier());
+    assertFalse(vse.getExpansion().hasTimestamp());
+  }
+
+  private List<ValueSet.ValueSetExpansionContainsComponent> createContainsArray(int size) {
+    return IntStream.range(0, size).boxed()
+      .map(value -> new ValueSet.ValueSetExpansionContainsComponent().setCode("dummyVSExpansionContainsComponent"
+        + value)).collect(Collectors.toList());
+  }
+
+  private static Stream<Arguments> under1000IntParams() {
+    return getIntParams(0, 1000);
+  }
+
+  private static Stream<Arguments> over1000IntParams() {
+    return getIntParams(1000, 1100);
+  }
+
+  private static Stream<Arguments> getIntParams(int min, int max) {
+    return new Random().ints(5, min, max).boxed().map( value ->
+      Arguments.of(value)
+    );
+  }
+
+  @ParameterizedTest
+  @MethodSource("under1000IntParams")
+  public void testExtractedUnder1000(int max) throws IOException {
+    TerminologyCache cache = createTerminologyCache();
+    ValueSet vs = new ValueSet();
+
+    List<ValueSet.ValueSetExpansionContainsComponent> list = createContainsArray(max);
+
+    vs.setUrl("http://dummy.org");
+    vs.getExpansion().setContains(list);
+
+    org.hl7.fhir.r5.formats.JsonParser json = new org.hl7.fhir.r5.formats.JsonParser();
+    json.setOutputStyle(IParser.OutputStyle.PRETTY);
+    String extracted = cache.extracted(json, vs);
+
+    JsonElement element = jsonParser.parse(extracted);
+    assertEquals(max, element.getAsJsonObject().getAsJsonObject("expansion").getAsJsonArray("contains").size());
+
+  }
+
+  @ParameterizedTest
+  @MethodSource("over1000IntParams")
+  public void testExtractedOver1000(int max) throws IOException {
+
+    TerminologyCache cache = createTerminologyCache();
+    ValueSet vs = new ValueSet();
+
+    List<ValueSet.ValueSetExpansionContainsComponent> list = createContainsArray(max);
+
+    vs.setUrl("http://dummy.org");
+    vs.getExpansion().setContains(list);
+
+    org.hl7.fhir.r5.formats.JsonParser json = new org.hl7.fhir.r5.formats.JsonParser();
+    json.setOutputStyle(IParser.OutputStyle.PRETTY);
+    String extracted = cache.extracted(json, vs);
+
+    assertEquals("http://dummy.org", extracted);
   }
 
   @Test
@@ -137,6 +256,5 @@ public class TerminologyCacheTests {
     assertTrue(cache.hasTerminologyCapabilities());
     assertTrue(cache.hasCapabilityStatement());
 
-
   }
 }

expansion.json (new file)
@@ -0,0 +1,3 @@
+{"hierarchical" : false, "valueSet" :{
+  "resourceType" : "ValueSet"
+}}

expansionHierarchical.json (new file)
@@ -0,0 +1,3 @@
+{"hierarchical" : true, "valueSet" :{
+  "resourceType" : "ValueSet"
+}}