Testing Part 7

parent 38728f2c82
commit 7c58619528
@@ -38,8 +38,10 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.util.*;
 
+import lombok.AccessLevel;
 import lombok.Getter;
 import lombok.Setter;
+import lombok.experimental.Accessors;
 import org.apache.commons.lang3.StringUtils;
 import org.hl7.fhir.exceptions.FHIRException;
 import org.hl7.fhir.r5.context.IWorkerContext.ValidationResult;
@@ -92,10 +94,13 @@ public class TerminologyCache {
   private int networkCount;
 
   public class CacheToken {
+    @Getter
     private String name;
     private String key;
     @Getter
     private String request;
+    @Accessors(fluent = true)
+    @Getter
     private boolean hasVersion;
 
     public void setName(String n) {
@@ -247,7 +252,7 @@ public class TerminologyCache {
     return vsc;
   }
 
-  public CacheToken generateExpandToken(ValueSet vs, boolean heirarchical) {
+  public CacheToken generateExpandToken(ValueSet vs, boolean hierarchical) {
     CacheToken ct = new CacheToken();
     ValueSet vsc = getVSEssense(vs);
     for (ConceptSetComponent inc : vs.getCompose().getInclude())
@@ -268,7 +273,7 @@ public class TerminologyCache {
     JsonParser json = new JsonParser();
     json.setOutputStyle(OutputStyle.PRETTY);
     try {
-      ct.request = "{\"hierarchical\" : "+(heirarchical ? "true" : "false")+", \"valueSet\" :"+extracted(json, vsc)+"}\r\n";
+      ct.request = "{\"hierarchical\" : "+(hierarchical ? "true" : "false")+", \"valueSet\" :"+extracted(json, vsc)+"}\r\n";
     } catch (IOException e) {
       throw new Error(e);
     }
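Note on the CacheToken change above: @Accessors(fluent = true) combined with @Getter on the boolean field makes Lombok generate an accessor named hasVersion() rather than the bean-style isHasVersion(), which is the method the new tests call. A minimal sketch of that behaviour, assuming Lombok is on the classpath (the sketch classes below are illustrative and not part of this commit):

import lombok.Getter;
import lombok.experimental.Accessors;

class CacheTokenSketch {

  // @Accessors(fluent = true) names the generated getter after the field itself,
  // so Lombok emits hasVersion() instead of isHasVersion().
  @Accessors(fluent = true)
  @Getter
  private boolean hasVersion;

  // A plain @Getter keeps the conventional bean naming: getName().
  @Getter
  private String name;
}

class CacheTokenSketchUsage {
  static String describe(CacheTokenSketch token) {
    return token.getName() + (token.hasVersion() ? " (versioned)" : "");
  }
}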
@@ -32,6 +32,24 @@ import static org.mockito.Mockito.mock;
 
 public class TerminologyCacheTests {
 
+  static final ValueSet.ConceptSetComponent include = new ValueSet.ConceptSetComponent();
+  static {
+    include.setSystem("dummyIncludeSystem");
+    include.setVersion("dummyIncludeVersion");
+  }
+
+  static final ValueSet.ConceptSetComponent exclude = new ValueSet.ConceptSetComponent();
+  static {
+    exclude.setSystem("dummyExcludeSystem");
+    exclude.setVersion("dummyExcludeVersion");
+  }
+
+  static final ValueSet.ValueSetExpansionContainsComponent containsComponent = new ValueSet.ValueSetExpansionContainsComponent();
+  static {
+    containsComponent.setSystem("dummyContainsSystem");
+    containsComponent.setVersion("dummyContainsVersion");
+  }
+
   private JsonParser jsonParser = new JsonParser();
 
   private JsonElement getJsonFromFile(String filename) throws URISyntaxException, IOException {
@@ -69,13 +87,23 @@ public class TerminologyCacheTests {
     Coding coding = new Coding();
     coding.setCode("dummyCode");
+
+    CodeableConcept concept = new CodeableConcept();
+    concept.addCoding(new Coding().setCode("dummyCode"));
+    ValueSet ccvalueSet = new ValueSet();
+
 
     // Add dummy results to the cache
     TerminologyCache terminologyCacheA = new TerminologyCache(lock, tempCacheDirectory.toString());
 
-    IWorkerContext.ValidationResult validationResultA = new IWorkerContext.ValidationResult(ValidationMessage.IssueSeverity.INFORMATION, "dummyInfo");
+    IWorkerContext.ValidationResult codingResultA = new IWorkerContext.ValidationResult(ValidationMessage.IssueSeverity.INFORMATION, "dummyInfo");
     TerminologyCache.CacheToken codingTokenA = terminologyCacheA.generateValidationToken(CacheTestUtils.validationOptions,
       coding, valueSet);
-    terminologyCacheA.cacheValidation(codingTokenA, validationResultA, true);
+    terminologyCacheA.cacheValidation(codingTokenA, codingResultA, true);
+
+    IWorkerContext.ValidationResult codeableConceptResultA = new IWorkerContext.ValidationResult(ValidationMessage.IssueSeverity.INFORMATION, "dummyInfo");
+    TerminologyCache.CacheToken codeableConceptTokenA = terminologyCacheA.generateValidationToken(CacheTestUtils.validationOptions,
+      concept, valueSet);
+    terminologyCacheA.cacheValidation(codeableConceptTokenA, codeableConceptResultA, true);
 
     TerminologyCache.CacheToken expansionTokenA = terminologyCacheA.generateExpandToken(valueSet, true);
     ValueSetExpander.ValueSetExpansionOutcome expansionOutcomeA = new ValueSetExpander.ValueSetExpansionOutcome(valueSet);
@@ -83,7 +111,8 @@ public class TerminologyCacheTests {
     terminologyCacheA.cacheExpansion(expansionTokenA, expansionOutcomeA, true);
     // Check that the in-memory cache is returning what we put in
     {
-      assertValidationResultEquals(validationResultA, terminologyCacheA.getValidation(codingTokenA));
+      assertValidationResultEquals(codingResultA, terminologyCacheA.getValidation(codingTokenA));
+      assertValidationResultEquals(codeableConceptResultA, terminologyCacheA.getValidation(codeableConceptTokenA));
       assertExpansionOutcomeEquals(expansionOutcomeA,terminologyCacheA.getExpansion(expansionTokenA));
     }
 
@@ -91,8 +120,10 @@ public class TerminologyCacheTests {
     {
       TerminologyCache terminologyCacheB = new TerminologyCache(lock, tempCacheDirectory.toString());
 
-      assertValidationResultEquals(validationResultA, terminologyCacheB.getValidation(terminologyCacheA.generateValidationToken(CacheTestUtils.validationOptions,
+      assertValidationResultEquals(codingResultA, terminologyCacheB.getValidation(terminologyCacheA.generateValidationToken(CacheTestUtils.validationOptions,
         coding, valueSet)));
+      assertValidationResultEquals(codeableConceptResultA, terminologyCacheB.getValidation(terminologyCacheA.generateValidationToken(CacheTestUtils.validationOptions,
+        concept, valueSet)));
       assertExpansionOutcomeEquals(expansionOutcomeA,terminologyCacheB.getExpansion(terminologyCacheA.generateExpandToken(valueSet, true)));
     }
     deleteTempCacheDirectory(tempCacheDirectory);
@@ -125,6 +156,57 @@ public class TerminologyCacheTests {
     assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
   }
 
+  @Test
+  public void testCodingWithSystemCacheTokenGeneration() throws IOException, URISyntaxException {
+
+    TerminologyCache terminologyCache = createTerminologyCache();
+    ValueSet valueSet = new ValueSet();
+
+    Coding coding = new Coding();
+    coding.setCode("dummyCode");
+    coding.setSystem("dummySystem");
+    coding.setVersion("dummyVersion");
+    TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
+      coding, valueSet );
+
+    JsonElement actual = jsonParser.parse(cacheToken.getRequest());
+    JsonElement expected = getJsonFromFile("codingEmptyValueSetSystem.json");
+
+    assertEquals(expected, actual);
+    assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
+  }
+
+  @Test
+  public void testCodingWithSystemCacheTokenGenerationNoSystem() throws IOException, URISyntaxException {
+
+    TerminologyCache terminologyCache = createTerminologyCache();
+    ValueSet valueSet = new ValueSet();
+
+    Coding coding = new Coding();
+    coding.setCode("dummyCode");
+
+    TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
+      coding, valueSet);
+    assertEquals("all-systems", cacheToken.getName());
+    assertFalse(cacheToken.hasVersion());
+  }
+
+  @Test
+  public void testCodingWithSystemCacheTokenGenerationWithSystem() throws IOException, URISyntaxException {
+
+    TerminologyCache terminologyCache = createTerminologyCache();
+    ValueSet valueSet = new ValueSet();
+
+    Coding coding = new Coding();
+    coding.setCode("dummyCode");
+    coding.setSystem("dummySystem");
+    coding.setVersion("dummyVersion");
+    TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
+      coding, valueSet);
+    assertEquals("dummySystem", cacheToken.getName());
+    assertTrue(cacheToken.hasVersion());
+  }
+
   @Test
   public void testCodableConceptCacheTokenGeneration() throws IOException, URISyntaxException {
 
@@ -135,6 +217,9 @@ public class TerminologyCacheTests {
     TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
       concept, valueSet );
 
+    assertNull(cacheToken.getName());
+    assertEquals(false, cacheToken.hasVersion());
+
     JsonElement actual = jsonParser.parse(cacheToken.getRequest());
     JsonElement expected = getJsonFromFile("codableConceptEmptyValueSet.json");
 
@@ -142,10 +227,98 @@ public class TerminologyCacheTests {
     assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
   }
 
+  @Test
+  public void testCodableConceptCacheTokenGenerationWithSystem() throws IOException, URISyntaxException {
+
+    TerminologyCache terminologyCache = createTerminologyCache();
+    CodeableConcept concept = new CodeableConcept();
+    Coding coding = new Coding().setCode("dummyCode");
+    coding.setSystem("dummySystem");
+    coding.setVersion("dummyVersion");
+    concept.addCoding(coding);
+
+    ValueSet valueSet = new ValueSet();
+    TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
+      concept, valueSet);
+
+    assertEquals("dummySystem", cacheToken.getName());
+    assertEquals(true, cacheToken.hasVersion());
+
+    JsonElement actual = jsonParser.parse(cacheToken.getRequest());
+    JsonElement expected = getJsonFromFile("codableConceptEmptyValueSetSystem.json");
+
+    assertEquals(expected, actual);
+    assertEquals(terminologyCache.hashJson(expected.toString()), terminologyCache.hashJson(actual.toString()));
+
+  }
+
+  @Test
+  public void testCodableConceptCacheTokenGenerationNoSystem() throws IOException, URISyntaxException {
+
+    TerminologyCache terminologyCache = createTerminologyCache();
+    CodeableConcept concept = new CodeableConcept();
+    Coding coding = new Coding().setCode("dummyCode");
+
+    concept.addCoding(coding);
+
+    ValueSet valueSet = new ValueSet();
+    TerminologyCache.CacheToken cacheToken = terminologyCache.generateValidationToken(CacheTestUtils.validationOptions,
+      concept, valueSet);
+
+    assertNull(cacheToken.getName());
+    assertFalse(cacheToken.hasVersion());
+  }
+
+  private static Stream<Arguments> getExpansionTokenParams() {
+    ValueSet baseValueSet = new ValueSet();
+    baseValueSet.setUrl("dummyUrl");
+
+    ValueSet withInclude = baseValueSet.copy();
+    withInclude.getCompose().setInclude(Arrays.asList(include));
+
+    ValueSet withExclude = baseValueSet.copy();
+    withExclude.getCompose().setExclude(Arrays.asList(exclude));
+
+    ValueSet withExpansion = baseValueSet.copy();
+    withExpansion.getExpansion().setContains(Arrays.asList(containsComponent));
+
+    ValueSet allSystem = baseValueSet.copy();
+    allSystem.getCompose().setExclude(Arrays.asList(exclude));
+    allSystem.getExpansion().setContains(Arrays.asList(containsComponent));
+
+    return Stream.of(
+      Arguments.of(baseValueSet, null, false),
+      Arguments.of(withInclude, "dummyIncludeSystem", true),
+      Arguments.of(withExclude, "dummyExcludeSystem", true),
+      Arguments.of(withExpansion, "dummyContainsSystem", true),
+      // Essentially, if more than one system is used, we're switching to 'all-systems'
+      Arguments.of(allSystem, "all-systems", true)
+    );
+  }
+
+  @ParameterizedTest
+  @MethodSource("getExpansionTokenParams")
+  public void testExpansionTokenInclude(ValueSet valueSet, String expectedName, boolean expectedHasVersion) throws IOException, URISyntaxException {
+    TerminologyCache terminologyCache = createTerminologyCache();
+
+    TerminologyCache.CacheToken expansionToken = terminologyCache.generateExpandToken(valueSet, false);
+    TerminologyCache.CacheToken expansionTokenHierarchical = terminologyCache.generateExpandToken(valueSet, true);
+
+    assertEquals(expectedName, expansionToken.getName());
+    assertEquals(expectedName, expansionTokenHierarchical.getName());
+    assertEquals(expectedHasVersion, expansionToken.hasVersion());
+    assertEquals(expectedHasVersion, expansionTokenHierarchical.hasVersion());
+  }
+
   @Test
   public void testExpansionToken() throws IOException, URISyntaxException {
     TerminologyCache terminologyCache = createTerminologyCache();
     ValueSet valueSet = new ValueSet();
+    valueSet.setUrl("dummyUrl");
+
+    valueSet.getCompose().setInclude(Arrays.asList(include));
+    valueSet.getCompose().setExclude(Arrays.asList(exclude));
+    valueSet.getExpansion().setContains(Arrays.asList(containsComponent));
+
     TerminologyCache.CacheToken expansionToken = terminologyCache.generateExpandToken(valueSet, false);
     TerminologyCache.CacheToken expansionTokenHierarchical = terminologyCache.generateExpandToken(valueSet, true);
@@ -159,6 +332,7 @@ public class TerminologyCacheTests {
     JsonElement expectedExpansionHierarchical = getJsonFromFile("expansionHierarchical.json");
 
     assertEquals(expectedExpansionHierarchical, actualExpansionHierarchical);
+
     assertEquals(terminologyCache.hashJson(expectedExpansion.toString()),
       terminologyCache.hashJson(actualExpansion.toString()));
     assertEquals(terminologyCache.hashJson(expectedExpansionHierarchical.toString()),
@@ -199,7 +373,7 @@ public class TerminologyCacheTests {
   }
 
   private static Stream<Arguments> over1000IntParams() {
-    return getIntParams(1000, 1100);
+    return getIntParams(1001, 1100);
   }
 
   private static Stream<Arguments> getIntParams(int min, int max) {
@@ -246,15 +420,4 @@ public class TerminologyCacheTests {
 
     assertEquals("http://dummy.org", extracted);
   }
-
-  @Test
-  public void testDummyCache() throws IOException {
-    Object lock = new Object();
-    Path path = Paths.get("src","test","resources", "context", "dummyCache");
-    TerminologyCache cache = new TerminologyCache(lock, path.toString());
-
-    assertTrue(cache.hasTerminologyCapabilities());
-    assertTrue(cache.hasCapabilityStatement());
-
-  }
 }
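Note on the parameterized tests added above: getExpansionTokenParams() is wired to testExpansionTokenInclude through JUnit 5's @MethodSource, so each Arguments tuple drives one invocation of the test with its values bound positionally to the parameters. A self-contained sketch of that mechanism (class name, method names, and sample rows are illustrative, not taken from the diff):

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class MethodSourceSketch {

  // Each Arguments.of(...) row becomes one invocation of the test method below.
  private static Stream<Arguments> cases() {
    return Stream.of(
      Arguments.of("dummyIncludeSystem", true),
      Arguments.of("all-systems", true));
  }

  @ParameterizedTest
  @MethodSource("cases")
  void eachRowRunsOnce(String expectedName, boolean expectedHasVersion) {
    // The real tests build a ValueSet per row and ask TerminologyCache for an
    // expansion token; this sketch only demonstrates the parameter plumbing.
    assertNotNull(expectedName);
  }
}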
@@ -0,0 +1,20 @@
+{
+  "code": {
+    "coding": [
+      {
+        "system": "dummySystem",
+        "version": "dummyVersion",
+        "code": "dummyCode"
+      }
+    ]
+  },
+  "valueSet": {
+    "resourceType": "ValueSet"
+  },
+  "lang": "null",
+  "useServer": "true",
+  "useClient": "true",
+  "guessSystem": "true",
+  "valueSetMode": "ALL_CHECKS",
+  "versionFlexible": "false"
+}
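The fixture above, like the other token-request fixtures in this commit, is compared structurally in the tests: both the generated request and the file are parsed to JsonElement before assertEquals, so key order and whitespace do not affect the result. A minimal sketch of that kind of comparison with Gson 2.8.6+ (the class, method, and path handling here are illustrative, not the tests' own helper):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

class FixtureComparisonSketch {

  // JsonElement.equals is deep, and object members compare order-insensitively,
  // so a pretty-printed fixture can be matched against a compact request string.
  static boolean matchesFixture(String actualRequestJson, String fixturePath) throws IOException {
    JsonElement actual = JsonParser.parseString(actualRequestJson);
    JsonElement expected = JsonParser.parseString(
      new String(Files.readAllBytes(Paths.get(fixturePath))));
    return expected.equals(actual);
  }
}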
@@ -1,14 +1 @@
-{
-  "code": {
-    "code": "dummyCode"
-  },
-  "valueSet": {
-    "resourceType": "ValueSet"
-  },
-  "lang": "null",
-  "useServer": "true",
-  "useClient": "true",
-  "guessSystem": "true",
-  "valueSetMode": "ALL_CHECKS",
-  "versionFlexible": "false"
-}
+{"code":{"code":"dummyCode"},"valueSet":{"resourceType":"ValueSet"},"lang":"null","useServer":"true","useClient":"true","guessSystem":"true","valueSetMode":"ALL_CHECKS","versionFlexible":"false"}
@@ -0,0 +1 @@
+{"code":{"system":"dummySystem","version":"dummyVersion","code":"dummyCode"},"valueSet":{"resourceType":"ValueSet"},"lang":"null","useServer":"true","useClient":"true","guessSystem":"true","valueSetMode":"ALL_CHECKS","versionFlexible":"false"}
@@ -1,62 +0,0 @@
-{
-  "resourceType" : "CapabilityStatement",
-  "id" : "FhirServer",
-  "meta" : {
-    "tag" : [{
-      "system" : "http://hl7.org/fhir/v3/ObservationValue",
-      "code" : "SUBSETTED",
-      "display" : "Subsetted"
-    }]
-  },
-  "url" : "http://fhir.healthintersections.com.au/open/metadata",
-  "version" : "4.0.1-2.0.12-SNAPSHOT",
-  "name" : "FHIR Reference Server Conformance Statement",
-  "status" : "active",
-  "date" : "2022-01-10T11:07:19.254Z",
-  "contact" : [{
-    "telecom" : [{
-      "system" : "other",
-      "value" : "http://healthintersections.com.au/"
-    }]
-  }],
-  "kind" : "instance",
-  "instantiates" : ["http://hl7.org/fhir/CapabilityStatement/terminology-server"],
-  "software" : {
-    "name" : "Reference Server",
-    "version" : "2.0.12-SNAPSHOT",
-    "releaseDate" : "2021-12-20T02:28:03.769Z"
-  },
-  "fhirVersion" : "4.0.1",
-  "format" : ["application/fhir+xml",
-  "application/fhir+json"],
-  "rest" : [{
-    "mode" : "server",
-    "security" : {
-      "cors" : true
-    },
-    "operation" : [{
-      "name" : "expand",
-      "definition" : "http://hl7.org/fhir/OperationDefinition/ValueSet-expand"
-    },
-    {
-      "name" : "lookup",
-      "definition" : "http://hl7.org/fhir/OperationDefinition/ValueSet-lookup"
-    },
-    {
-      "name" : "validate-code",
-      "definition" : "http://hl7.org/fhir/OperationDefinition/Resource-validate"
-    },
-    {
-      "name" : "translate",
-      "definition" : "http://hl7.org/fhir/OperationDefinition/ConceptMap-translate"
-    },
-    {
-      "name" : "closure",
-      "definition" : "http://hl7.org/fhir/OperationDefinition/ConceptMap-closure"
-    },
-    {
-      "name" : "versions",
-      "definition" : "/OperationDefinition/fso-versions"
-    }]
-  }]
-}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,11 +0,0 @@
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://dummysite.org/fhir/CodeSystem/dummy",
-  "code" : "Every 4 weeks"
-}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"true"}####
-v: {
-  "severity" : "error",
-  "error" : "The code system 'http://dummysite.org/fhir/CodeSystem/dummy' is not known (encountered paired with code = 'Every 4 weeks'); The code provided (http://dummysite.org/fhir/CodeSystem/dummy#Every 4 weeks) is not valid in the value set 'All codes known to the system' (from http://tx.fhir.org/r4)",
-  "class" : "CODESYSTEM_UNSUPPORTED"
-}
--------------------------------------------------------------------------------------
@@ -1,49 +0,0 @@
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://unitsofmeasure.org",
-  "code" : "L/min"
-}, "valueSet" :null, "lang":"fi", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
-v: {
-  "display" : "L/min",
-  "code" : "L/min",
-  "system" : "http://unitsofmeasure.org"
-}
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://unitsofmeasure.org",
-  "code" : "21612-7"
-}, "valueSet" :null, "lang":"fi", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
-v: {
-  "severity" : "error",
-  "error" : "Error processing Unit: '21612-7': Expected \"/\" or \".\" at character 6; The code \"21612-7\" is not valid in the system http://unitsofmeasure.org; The code provided (http://unitsofmeasure.org#21612-7) is not valid in the value set 'All codes known to the system' (from http://tx.fhir.org/r3)"
-}
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://unitsofmeasure.org",
-  "code" : "tbl"
-}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
-v: {
-  "severity" : "error",
-  "error" : "Error processing Unit: 'tbl': The unit \"tbl\" is unknown at character 1; The code \"tbl\" is not valid in the system http://unitsofmeasure.org; The code provided (http://unitsofmeasure.org#tbl) is not valid in the value set 'All codes known to the system' (from http://tx.fhir.org/r3)"
-}
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://unitsofmeasure.org",
-  "code" : "mmol/L"
-}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
-v: {
-  "display" : "mmol/L",
-  "code" : "mmol/L",
-  "system" : "http://unitsofmeasure.org"
-}
--------------------------------------------------------------------------------------
-{"code" : {
-  "system" : "http://unitsofmeasure.org",
-  "code" : "[lb_av]"
-}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"true"}####
-v: {
-  "display" : "[lb_av]",
-  "code" : "[lb_av]",
-  "system" : "http://unitsofmeasure.org"
-}
--------------------------------------------------------------------------------------
@@ -1,3 +1,28 @@
-{"hierarchical" : false, "valueSet" :{
-"resourceType" : "ValueSet"
-}}
+{
+  "hierarchical": false,
+  "valueSet": {
+    "resourceType": "ValueSet",
+    "compose": {
+      "include": [
+        {
+          "system": "dummyIncludeSystem",
+          "version": "dummyIncludeVersion"
+        }
+      ],
+      "exclude": [
+        {
+          "system": "dummyExcludeSystem",
+          "version": "dummyExcludeVersion"
+        }
+      ]
+    },
+    "expansion": {
+      "contains": [
+        {
+          "system": "dummyContainsSystem",
+          "version": "dummyContainsVersion"
+        }
+      ]
+    }
+  }
+}
@@ -1,3 +1,28 @@
-{"hierarchical" : true, "valueSet" :{
-"resourceType" : "ValueSet"
-}}
+{
+  "hierarchical": true,
+  "valueSet": {
+    "resourceType": "ValueSet",
+    "compose": {
+      "include": [
+        {
+          "system": "dummyIncludeSystem",
+          "version": "dummyIncludeVersion"
+        }
+      ],
+      "exclude": [
+        {
+          "system": "dummyExcludeSystem",
+          "version": "dummyExcludeVersion"
+        }
+      ]
+    },
+    "expansion": {
+      "contains": [
+        {
+          "system": "dummyContainsSystem",
+          "version": "dummyContainsVersion"
+        }
+      ]
+    }
+  }
+}