Merge branch 'hapifhir:master' into master
commit 83a323ac33

@@ -0,0 +1,27 @@
<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">

<module name="Checker">

<property name="severity" value="error"/>
<property name="charset" value="UTF-8"/>
<property name="fileExtensions" value="java, properties, xml, js, json"/>
<module name="TreeWalker">
<!--
<module name="TodoComment">-->
<!-- The (?i) below means Case Insensitive -->
<!--<property name="format" value="(?i)FIXME"/>
-->
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.NotNull"/>
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.Nullable"/>
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.\*"/>
</module>
</module>
</module>
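
The three RegexpSinglelineJava checks above ban the JetBrains nullability annotations from the codebase. As a hedged illustration (the class and method below are invented for the example), this is the kind of code the checks would report as an error:

// Example only: any occurrence of the banned package name trips the regex checks above.
import org.jetbrains.annotations.NotNull;   // matches "org\.jetbrains\.annotations\.NotNull" -> Checkstyle error

class NullnessExample {
  void greet(@NotNull String name) {        // the import line alone is enough to fail the build
    System.out.println("Hello " + name);
  }
}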

@@ -0,0 +1,8 @@
coverage:
  status:
    project:
      default:
        informational: true
    patch:
      default:
        informational: true

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -87,7 +87,7 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

@@ -3,8 +3,22 @@ package org.hl7.fhir.convertors.loaders.loaderR5;
import java.io.IOException;

import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r5.conformance.profile.ProfileUtilities;
import org.hl7.fhir.r5.context.IWorkerContext.IContextResourceLoader;
import org.hl7.fhir.r5.model.CanonicalResource;
import org.hl7.fhir.r5.model.CanonicalType;
import org.hl7.fhir.r5.model.CapabilityStatement;
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.ElementDefinition;
import org.hl7.fhir.r5.model.OperationDefinition;
import org.hl7.fhir.r5.model.OperationDefinition.OperationDefinitionParameterComponent;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.StructureDefinition;
import org.hl7.fhir.r5.model.UriType;
import org.hl7.fhir.r5.model.ValueSet;
import org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.r5.model.ElementDefinition.TypeRefComponent;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.utilities.VersionUtilities;
import org.hl7.fhir.utilities.npm.NpmPackage;


@@ -18,12 +32,8 @@ import lombok.experimental.Accessors;
public abstract class BaseLoaderR5 implements IContextResourceLoader {

protected final String URL_BASE = "http://hl7.org/fhir/";
protected final String URL_DSTU2 = "http://hl7.org/fhir/1.0/";
protected final String URL_DSTU2016MAY = "http://hl7.org/fhir/1.4/";
protected final String URL_DSTU3 = "http://hl7.org/fhir/3.0/";
protected final String URL_R4 = "http://hl7.org/fhir/4.0/";
protected final String URL_ELEMENT_DEF_NAMESPACE = "http://hl7.org/fhir/StructureDefinition/elementdefinition-namespace";
@Getter @Setter protected boolean patchUrls;
protected boolean patchUrls;
@Getter @Setter protected boolean killPrimitives;
@Getter protected String[] types;
protected ILoaderKnowledgeProviderR5 lkp;

@@ -73,4 +83,87 @@ public abstract class BaseLoaderR5 implements IContextResourceLoader {
}
}

public boolean isPatchUrls() {
return patchUrls;
}

public void setPatchUrls(boolean patchUrls) {
this.patchUrls = patchUrls;
}

protected abstract String versionString();

@Override
public String patchUrl(String url, String type) {
if (!patchUrls || url == null) {
return url;
} else if (url.startsWith("http://hl7.org/fhir/"+type+"/")) {
return "http://hl7.org/fhir/"+versionString()+"/"+url.substring(20);
} else if ("CodeSystem".equals(type) && url.startsWith("http://hl7.org/fhir/")) {
return "http://hl7.org/fhir/"+versionString()+"/"+url.substring(20);
} else {
return url;
}
}
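
As a concrete illustration of the patching rule above (a standalone sketch that only repeats the string logic; "4.0" is what R4ToR5Loader's versionString() returns later in this commit):

// "http://hl7.org/fhir/" is 20 characters long, hence the substring(20) above.
String url = "http://hl7.org/fhir/ValueSet/administrative-gender";
String patched = "http://hl7.org/fhir/" + "4.0" + "/" + url.substring(20);
// patched == "http://hl7.org/fhir/4.0/ValueSet/administrative-gender"
// URLs that do not start with http://hl7.org/fhir/<type>/ are returned unchanged.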

// we don't patch everything. It's quite hard work to do that,
// and we only patch URLs to support version transforms
// so we just patch sd/od -> vs -> cs
protected void doPatchUrls(Resource resource) {
if (resource instanceof CanonicalResource) {
CanonicalResource cr = (CanonicalResource) resource;
cr.setUrl(patchUrl(cr.getUrl(), cr.fhirType()));
if (cr instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) cr;
sd.setBaseDefinition(patchUrl(sd.getBaseDefinition(), sd.fhirType()));
new ProfileUtilities(null, null, null, null).setIds(sd, false);
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}

if (cr instanceof ValueSet) {
ValueSet vs = (ValueSet) cr;
for (ConceptSetComponent inc : vs.getCompose().getInclude()) {
inc.setSystem(patchUrl(inc.getSystem(), "CodeSystem"));
}
for (ConceptSetComponent inc : vs.getCompose().getExclude()) {
inc.setSystem(patchUrl(inc.getSystem(), "CodeSystem"));
}
}
if (cr instanceof OperationDefinition) {
OperationDefinition od = (OperationDefinition) cr;
for (OperationDefinitionParameterComponent param : od.getParameter()) {
patchUrls(param);
}
}
}
}

private void patchUrls(OperationDefinitionParameterComponent param) {
if (param.hasBinding()) {
param.getBinding().setValueSet(patchUrl(param.getBinding().getValueSet(), "ValueSet"));
}
for (OperationDefinitionParameterComponent p : param.getPart()) {
patchUrls(p);
}
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
if (!Utilities.isAbsoluteUrl(tr.getCode())) {
tr.setCode(URL_BASE+versionString()+"/StructureDefinition/"+tr.getCode());
}
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(patchUrl(s.getValue(), "StructureDefinition"));
}
}
if (ed.hasBinding()) {
ed.getBinding().setValueSet(patchUrl(ed.getBinding().getValueSet(), "ValueSet"));
}
}

}

@@ -98,13 +98,10 @@ public class R2016MayToR5Loader extends BaseLoaderR5 {
}
b.getEntry().removeAll(remove);
}
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
new ProfileUtilities(null, null, null, null).setIds(sd, false);
if (patchUrls) {
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_DSTU2016MAY));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -128,33 +125,20 @@ public class R2016MayToR5Loader extends BaseLoaderR5 {
throw new FHIRException("Cannot kill primitives when using deferred loading");
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU2016MAY));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU2016MAY));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
return new ArrayList<>();
}

@Override
protected String versionString() {
return "4.3";
}

}

@@ -102,10 +102,8 @@ public class R2ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
}
if (patchUrls) {
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_DSTU2));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -126,30 +124,11 @@ public class R2ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
throw new FHIRException("Cannot kill primitives when using deferred loading");
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU2));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU2));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
List<CodeSystem> list = new ArrayList<>();

@@ -160,4 +139,9 @@ public class R2ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
return list;
}

@Override
protected String versionString() {
return "1.0";
}

}

@@ -99,14 +99,8 @@ public class R3ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
}
if (patchUrls) {
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_DSTU3));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -130,33 +124,19 @@ public class R3ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
throw new FHIRException("Cannot kill primitives when using deferred loading");
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU3));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_DSTU3));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
return new ArrayList<>();
}

@Override
protected String versionString() {
return "3.0";
}

}

@@ -103,14 +103,8 @@ public class R4BToR5Loader extends BaseLoaderR5 implements IContextResourceLoade
}
if (patchUrls) {
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -137,34 +131,20 @@ public class R4BToR5Loader extends BaseLoaderR5 implements IContextResourceLoade
r5 = new StructureDefinitionHacker(version).fixSD((StructureDefinition) r5);
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType("http://hl7.org/fhir"));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
return new ArrayList<>();
}

@Override
protected String versionString() {
return "4.3";
}

}

@@ -103,14 +103,8 @@ public class R4ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
}
if (patchUrls) {
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -137,34 +131,21 @@ public class R4ToR5Loader extends BaseLoaderR5 implements IContextResourceLoader
r5 = new StructureDefinitionHacker(version).fixSD((StructureDefinition) r5);
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType("http://hl7.org/fhir"));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, "http://hl7.org/fhir/4.0/"));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
return new ArrayList<>();
}

@Override
protected String versionString() {
return "4.0";
}

}

@@ -96,14 +96,8 @@ public class R5ToR5Loader extends BaseLoaderR5 {
}
if (patchUrls) {
for (BundleEntryComponent be : b.getEntry()) {
if (be.hasResource() && be.getResource() instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) be.getResource();
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
if (be.hasResource()) {
doPatchUrls(be.getResource());
}
}
}

@@ -126,34 +120,20 @@ public class R5ToR5Loader extends BaseLoaderR5 {
throw new FHIRException("Cannot kill primitives when using deferred loading");
}
if (patchUrls) {
if (r5 instanceof StructureDefinition) {
StructureDefinition sd = (StructureDefinition) r5;
sd.setUrl(sd.getUrl().replace(URL_BASE, URL_R4));
sd.addExtension().setUrl(URL_ELEMENT_DEF_NAMESPACE).setValue(new UriType(URL_BASE));
for (ElementDefinition ed : sd.getSnapshot().getElement())
patchUrl(ed);
for (ElementDefinition ed : sd.getDifferential().getElement())
patchUrl(ed);
}
doPatchUrls(r5);
}
return r5;
}

private void patchUrl(ElementDefinition ed) {
for (TypeRefComponent tr : ed.getType()) {
for (CanonicalType s : tr.getTargetProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_R4));
}
for (CanonicalType s : tr.getProfile()) {
s.setValue(s.getValue().replace(URL_BASE, URL_R4));
}
}
}

@Override
public List<CodeSystem> getCodeSystems() {
return new ArrayList<>();
}

@Override
protected String versionString() {
return "5.0";
}

}

@@ -60,7 +60,7 @@ public class TerminologyClientFactory {
private static String checkEndsWith(String term, String url) {
if (url.endsWith(term))
return url;
if (url.startsWith("http://tx.fhir.org"))
if (url.startsWith("http://tx.fhir.org") || url.startsWith("https://tx.fhir.org"))
return Utilities.pathURL(url, term);
if (url.equals("http://local.fhir.org:8080"))
return Utilities.pathURL(url, term);

@@ -0,0 +1,74 @@
package org.hl7.fhir.convertors.txClient;

import org.hl7.fhir.r5.terminologies.TerminologyClient;
import org.hl7.fhir.utilities.FhirPublication;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

public class TerminologyClientFactoryTest {

public static Stream<Arguments> data() throws ParserConfigurationException, SAXException, IOException {
List<Arguments> objects = new ArrayList<>();
objects.addAll(getDefaultServerArgs("tx.fhir.org/r4", "tx.fhir.org", FhirPublication.R4));
objects.addAll(getHttpAndHttpsArgs("tx.fhir.org", null, "tx.fhir.org/r4"));
objects.addAll(getHttpAndHttpsArgs("tx.fhir.org/r4", null, "tx.fhir.org/r4"));
objects.addAll(getDefaultServerArgs("tx.fhir.org/r2", "tx.fhir.org", FhirPublication.DSTU2));
objects.addAll(getDefaultServerArgs("tx.fhir.org/r3", "tx.fhir.org", FhirPublication.DSTU2016May));
objects.addAll(getDefaultServerArgs("tx.fhir.org/r4", "tx.fhir.org", FhirPublication.R4B));
objects.addAll(getDefaultServerArgs("tx.fhir.org/r4", "tx.fhir.org", FhirPublication.R5));
objects.addAll(getDefaultServerArgs("tx.fhir.org/r3", "tx.fhir.org", FhirPublication.STU3));
objects.addAll(getHttpAndHttpsArgs("someserver.org", FhirPublication.R4, "someserver.org"));
objects.addAll(getHttpAndHttpsArgs("someserver.org", null, "someserver.org"));
return objects.stream();
}

private static List<Arguments> getDefaultServerArgs(String explicitUrl, String baseUrl, FhirPublication fhirPublication) {
List<Arguments> objects = new ArrayList<>();
objects.addAll(getHttpAndHttpsArgs(baseUrl, fhirPublication, explicitUrl));
objects.addAll(getHttpAndHttpsArgs(explicitUrl, fhirPublication, explicitUrl));
return objects;
}

private static List<Arguments> getHttpAndHttpsArgs(String baseUrl, FhirPublication fhirPublication, String baseExpectedAddress) {
return List.of(
Arguments.of("https://" + baseUrl, fhirPublication, "https://" + baseExpectedAddress),
Arguments.of("http://" + baseUrl, fhirPublication, "http://" + baseExpectedAddress)
);
}

@ParameterizedTest
@MethodSource("data")
public void testMakeClient(String url, FhirPublication fhirPublication, String expectedAddress) throws URISyntaxException {
TerminologyClient terminologyClient = TerminologyClientFactory.makeClient(url, "dummyUserAgent", fhirPublication);
assertEquals(expectedAddress, terminologyClient.getAddress());
}

@Test
public void testMakeClientDstu1Fails() throws URISyntaxException {
assertThrows(Error.class, () -> {
TerminologyClient terminologyClient = TerminologyClientFactory.makeClient("urldoesnotmatter", "dummyUserAgent", FhirPublication.DSTU1);
}
);
}

@Test
public void testMakeClientNullFails() throws URISyntaxException {
assertThrows(Error.class, () -> {
TerminologyClient terminologyClient = TerminologyClientFactory.makeClient("urldoesnotmatter", "dummyUserAgent", FhirPublication.NULL);
}
);
}
}

@@ -29,8 +29,8 @@ public class RdfParser extends RdfParserBase {
private void composeEnum(Complex parent, String parentType, String name, Enumeration<? extends Enum> value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
decorateCode(t, value);
}
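
The rewrite above (repeated for each primitive type below) changes the generated predicates: the element predicate drops the parent-type prefix and the literal moves from fhir:value to fhir:v; the extra boolean arguments appear to flag list membership (index > -1) and belong to the parser's own Complex API. A standalone sketch of just the naming change, with made-up element names:

String parentType = "Patient";
String name = "gender";
String oldPredicate = "fhir:" + parentType + "." + name; // previously "fhir:Patient.gender", literal under "fhir:value"
String newPredicate = "fhir:" + name;                    // now "fhir:gender", literal under "fhir:v"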

@@ -39,24 +39,24 @@ public class RdfParser extends RdfParserBase {
protected void composeDate(Complex parent, String parentType, String name, DateType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeDateTime(Complex parent, String parentType, String name, DateTimeType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeCode(Complex parent, String parentType, String name, CodeType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
decorateCode(t, value);
}

@@ -64,136 +64,136 @@ public class RdfParser extends RdfParserBase {
protected void composeString(Complex parent, String parentType, String name, StringType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeInteger(Complex parent, String parentType, String name, IntegerType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeInteger64(Complex parent, String parentType, String name, Integer64Type value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeOid(Complex parent, String parentType, String name, OidType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeCanonical(Complex parent, String parentType, String name, CanonicalType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeUri(Complex parent, String parentType, String name, UriType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeUuid(Complex parent, String parentType, String name, UuidType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeUrl(Complex parent, String parentType, String name, UrlType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeInstant(Complex parent, String parentType, String name, InstantType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeBoolean(Complex parent, String parentType, String name, BooleanType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeBase64Binary(Complex parent, String parentType, String name, Base64BinaryType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeUnsignedInt(Complex parent, String parentType, String name, UnsignedIntType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeMarkdown(Complex parent, String parentType, String name, MarkdownType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeTime(Complex parent, String parentType, String name, TimeType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeId(Complex parent, String parentType, String name, IdType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composePositiveInt(Complex parent, String parentType, String name, PositiveIntType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}

protected void composeDecimal(Complex parent, String parentType, String name, DecimalType value, int index) {
if (value == null)
return;
Complex t = parent.predicate("fhir:"+parentType+"."+name);
t.predicate("fhir:value", ttlLiteral(value.asStringValue()));
Complex t = parent.predicate("fhir:"+name, index > -1);
t.predicate("fhir:v", ttlLiteral(value.asStringValue()), false);
composeElement(t, parentType, name, value, index);
}


@@ -108,7 +108,7 @@ public class JavaParserRdfGenerator extends JavaBaseGenerator {
composer.append(" if (Utilities.noString(parentType))\r\n");
composer.append(" t = parent;\r\n");
composer.append(" else {\r\n");
composer.append(" t = parent.predicate(\"fhir:\"+parentType+'.'+name);\r\n");
composer.append(" t = parent.predicate(\"fhir:\"+name,index > -1);\r\n");
composer.append(" }\r\n");
composer.append(" compose"+ti.getAncestorName()+"(t, \""+ti.getDefn().getName()+"\", name, element, index);\r\n");
if (tn.equals("Coding"))

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -79,6 +79,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.hl7.fhir.utilities.xml.SchematronWriter;

@@ -1183,7 +1184,7 @@ public class ProfileUtilities {

public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, Set<String> outputTracker) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false, TableGenerationMode.XML);

boolean deep = false;
boolean vdeep = false;

@@ -1447,7 +1448,7 @@ public class ProfileUtilities {
public XhtmlNode generateTable(String defFile, StructureDefinition profile, boolean diff, String imageFolder, boolean inlineGraphics, String profileBaseFileName, boolean snapshot, String corePath, Set<String> outputTracker) throws IOException, FHIRException {
assert(diff != snapshot);// check it's ok to get rid of one of these
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false, TableGenerationMode.XML);
List<ElementDefinition> list = diff ? profile.getDifferential().getElement() : profile.getSnapshot().getElement();
List<StructureDefinition> profiles = new ArrayList<StructureDefinition>();
profiles.add(profile);

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -80,6 +80,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.hl7.fhir.utilities.xml.SchematronWriter;

@@ -1184,7 +1185,7 @@ public class ProfileUtilities {

public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, Set<String> outputTracker) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false, TableGenerationMode.XML);

boolean deep = false;
boolean vdeep = false;

@@ -1446,7 +1447,7 @@ public class ProfileUtilities {
public XhtmlNode generateTable(String defFile, StructureDefinition profile, boolean diff, String imageFolder, boolean inlineGraphics, String profileBaseFileName, boolean snapshot, String corePath, boolean logicalModel, Set<String> outputTracker) throws IOException, FHIRException {
assert(diff != snapshot);// check it's ok to get rid of one of these
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false, TableGenerationMode.XML);
List<ElementDefinition> list = diff ? profile.getDifferential().getElement() : profile.getSnapshot().getElement();
List<StructureDefinition> profiles = new ArrayList<StructureDefinition>();
profiles.add(profile);

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -55,7 +55,7 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

@@ -102,6 +102,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.hl7.fhir.utilities.xml.SchematronWriter;

@@ -1621,7 +1622,7 @@ public class ProfileUtilities extends TranslatingUtilities {
public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, String imagePath, Set<String> outputTracker) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false, TableGenerationMode.XML);

boolean deep = false;
String m = "";

@@ -1990,7 +1991,7 @@ public class ProfileUtilities extends TranslatingUtilities {
assert(diff != snapshot);// check it's ok to get rid of one of these
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false, TableGenerationMode.XML);
List<ElementDefinition> list = diff ? profile.getDifferential().getElement() : profile.getSnapshot().getElement();
List<StructureDefinition> profiles = new ArrayList<StructureDefinition>();
profiles.add(profile);

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -67,7 +67,7 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>

@@ -110,6 +110,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.hl7.fhir.utilities.xml.SchematronWriter;

@@ -2149,7 +2150,7 @@ public class ProfileUtilities extends TranslatingUtilities {
public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, String imagePath, Set<String> outputTracker) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics, true);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId(), false, TableGenerationMode.XML);

boolean deep = false;
String m = "";

@@ -2519,7 +2520,7 @@ public class ProfileUtilities extends TranslatingUtilities {
assert(diff != snapshot);// check it's ok to get rid of one of these
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics, true);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), false, TableGenerationMode.XML);
List<ElementDefinition> list = diff ? profile.getDifferential().getElement() : profile.getSnapshot().getElement();
List<StructureDefinition> profiles = new ArrayList<StructureDefinition>();
profiles.add(profile);

@@ -265,8 +265,10 @@ public class StructureMapUtilities {
public StructureMapUtilities(IWorkerContext worker) {
super();
this.worker = worker;
if (worker != null) {
fpe = new FHIRPathEngine(worker);
fpe.setHostServices(new FFHIRPathHostServices());
}
}

public static String render(StructureMap map) {

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -61,19 +61,19 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>logging-interceptor</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>mockwebserver</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
<scope>test</scope>
</dependency>

@@ -130,6 +130,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.hl7.fhir.utilities.xml.SchematronWriter;

@@ -3471,7 +3472,7 @@ public class ProfileUtilities extends TranslatingUtilities {
public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, String imagePath, Set<String> outputTracker, RenderingContext rc) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics, true);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId()+(full ? "f" : "n"), true);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId()+(full ? "f" : "n"), true, TableGenerationMode.XHTML);

boolean deep = false;
String m = "";

@@ -3988,7 +3989,7 @@ public class ProfileUtilities extends TranslatingUtilities {
assert(diff != snapshot);// check it's ok to get rid of one of these
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics, true);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), active);
TableModel model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), active, active ? TableGenerationMode.XHTML : TableGenerationMode.XML);
List<ElementDefinition> list = new ArrayList<>();
if (diff)
list.addAll(profile.getDifferential().getElement());

@@ -58,7 +58,7 @@ public abstract class BaseResource extends Base implements IAnyResource, IElemen

@Override
public FhirVersionEnum getStructureFhirVersionEnum() {
return FhirVersionEnum.R5; // to: change to R5
return FhirVersionEnum.R4B;
}

@Override

@@ -0,0 +1,8 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>

</suppressions>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>5.6.99-SNAPSHOT</version>
<version>5.6.100-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -111,13 +111,13 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>logging-interceptor</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

@@ -148,7 +148,7 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>mockwebserver</artifactId>
<version>4.9.0</version>
<version>${okhttp.version}</version>
<optional>true</optional>
<scope>test</scope>
</dependency>

@@ -23,6 +23,7 @@ import org.hl7.fhir.r5.model.NamingSystem.NamingSystemIdentifierType;
import org.hl7.fhir.r5.model.NamingSystem.NamingSystemUniqueIdComponent;
import org.hl7.fhir.r5.model.StructureDefinition.StructureDefinitionKind;
import org.hl7.fhir.r5.model.StructureDefinition.TypeDerivationRule;
import org.hl7.fhir.r5.model.StructureMap;
import org.hl7.fhir.r5.utils.ToolingExtensions;
import org.hl7.fhir.r5.utils.XVerExtensionManager;
import org.hl7.fhir.r5.model.Identifier;

@@ -42,6 +43,9 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
private boolean ignoreProfileErrors;
private XVerExtensionManager xverManager;
private Map<String, String> oidCache = new HashMap<>();
private List<StructureDefinition> allStructuresList = new ArrayList<StructureDefinition>();
private List<String> canonicalResourceNames;
private List<String> concreteResourceNames;

public ContextUtilities(IWorkerContext context) {
super();

@@ -196,20 +200,24 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
* @return a list of the resource names that are canonical resources defined for this version
*/
public List<String> getCanonicalResourceNames() {
List<String> names = new ArrayList<>();
if (canonicalResourceNames == null) {
canonicalResourceNames = new ArrayList<>();
Set<String> names = new HashSet<>();
for (StructureDefinition sd : allStructures()) {
if (sd.getKind() == StructureDefinitionKind.RESOURCE && !sd.getAbstract() && hasUrlProperty(sd)) {
names.add(sd.getType());
}
}
return names;
canonicalResourceNames.addAll(Utilities.sorted(names));
}
return canonicalResourceNames;
}
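
getCanonicalResourceNames now computes its result once, de-duplicates and sorts it, and returns the cached list on later calls. A standalone sketch of the same idiom using plain JDK collections (Utilities.sorted from the code above is approximated here with a TreeSet):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.TreeSet;

class NameCache {
  private List<String> cached; // stays null until the first call, then is reused

  List<String> names(Collection<String> raw) {
    if (cached == null) {
      cached = new ArrayList<>(new TreeSet<>(raw)); // unique and sorted, computed once
    }
    return cached;
  }
}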

/**
* @return a list of all structure definitions, with snapshots generated (if possible)
*/
public List<StructureDefinition> allStructures(){
List<StructureDefinition> result = new ArrayList<StructureDefinition>();
if (allStructuresList.isEmpty()) {
Set<StructureDefinition> set = new HashSet<StructureDefinition>();
for (StructureDefinition sd : getStructures()) {
if (!set.contains(sd)) {

@@ -224,11 +232,12 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
}
}
}
result.add(sd);
allStructuresList.add(sd);
set.add(sd);
}
}
return result;
}
return allStructuresList;
}

/**

@@ -368,5 +377,32 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
return null;
}

public List<String> getConcreteResources() {
if (concreteResourceNames == null) {
concreteResourceNames = new ArrayList<>();
Set<String> names = new HashSet<>();
for (StructureDefinition sd : allStructures()) {
if (sd.getKind() == StructureDefinitionKind.RESOURCE && !sd.getAbstract()) {
names.add(sd.getType());
}
}
concreteResourceNames.addAll(Utilities.sorted(names));
}
return concreteResourceNames;
}

public List<StructureMap> listMaps(String url) {
List<StructureMap> res = new ArrayList<>();
String start = url.substring(0, url.indexOf("*"));
String end = url.substring(url.indexOf("*")+1);
for (StructureMap map : context.fetchResourcesByType(StructureMap.class)) {
String u = map.getUrl();
if (u.startsWith(start) && u.endsWith(end)) {
res.add(map);
}
}
return res;
}
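
listMaps matches StructureMap canonicals against a single-wildcard pattern by splitting the URL at the "*". A standalone sketch of just that matching logic (the URLs below are made up for illustration):

String pattern = "http://hl7.org/fhir/StructureMap/*-to-r5";          // hypothetical pattern
String candidate = "http://hl7.org/fhir/StructureMap/patient-to-r5";  // hypothetical map URL
String start = pattern.substring(0, pattern.indexOf("*"));
String end = pattern.substring(pattern.indexOf("*") + 1);
boolean matches = candidate.startsWith(start) && candidate.endsWith(end); // true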

}

@@ -321,6 +321,22 @@ public interface IWorkerContext {
* @return
*/
List<CodeSystem> getCodeSystems();

/**
* if this is true, then the loader will patch canonical URLs and cross-links
* to add /X.X/ into the URL so that different versions can be loaded safely
*
* default is false
*/
void setPatchUrls(boolean value);

/**
* patch the URL if necessary
*
* @param url
* @return
*/
String patchUrl(String url, String resourceType);
}
|
||||
|
||||
/**
|
||||
|
|
|
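A rough sketch of how a loader implementation might be driven through the new methods; the patched form shown in the comment only illustrates the /X.X/ insertion described in the javadoc above, not a guaranteed output format:

    IContextResourceLoader loader = /* any IContextResourceLoader implementation */ null;
    loader.setPatchUrls(true);
    String patched = loader.patchUrl("http://hl7.org/fhir/StructureDefinition/Patient", "StructureDefinition");
    // with patching on, the canonical gains a version segment, e.g. .../fhir/X.X/StructureDefinition/Patient
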
@ -97,7 +97,7 @@ public class SimpleWorkerContext extends BaseWorkerContext implements IWorkerCon
private final IContextResourceLoader loader;

public PackageResourceLoader(PackageResourceInformation pri, IContextResourceLoader loader) {
super(pri.getResourceType(), pri.getId(), pri.getUrl(),pri.getVersion());
super(pri.getResourceType(), pri.getId(), loader == null ? pri.getUrl() :loader.patchUrl(pri.getUrl(), pri.getResourceType()), pri.getVersion());
this.filename = pri.getFilename();
this.loader = loader;
}

@ -129,6 +129,7 @@ public class Element extends Base {
private int instanceId;
private boolean isNull;
private Base source;
private boolean ignorePropertyOrder;

public Element(String name) {
super();

@ -148,6 +149,9 @@ public class Element extends Base {
super();
this.name = name;
this.property = property;
if (property.isResource()) {
children = new ArrayList<>();
}
}

public Element(String name, Property property, String type, String value) {

@ -211,8 +215,9 @@ public class Element extends Base {
this.value = value;
}

public void setType(String type) {
public Element setType(String type) {
this.type = type;
return this;

}

@ -286,10 +291,22 @@ public class Element extends Base {
for (Element child : children) {
if (name.equals(child.getName()))
return child.getValue();
}
for (Element child : children) {
if (name.equals(child.getNameBase()))
return child.getValue();
}
return null;
}

private String getNameBase() {
if (property.isChoice()) {
return property.getName().replace("[x]", "");
} else {
return getName();
}
}

public void setChildValue(String name, String value) {
if (children == null)
children = new ArrayList<Element>();

@ -538,6 +555,16 @@ public class Element extends Base {
Element ne = new Element(name, p);
children.add(ne);
return ne;
} else if (p.getDefinition().isChoice() && name.startsWith(p.getName().replace("[x]", ""))) {
String type = name.substring(p.getName().length()-3);
if (new ContextUtilities(property.getContext()).isPrimitiveDatatype(Utilities.uncapitalize(type))) {
type = Utilities.uncapitalize(type);
}
Element ne = new Element(name, p);
ne.setType(type);
children.add(ne);
return ne;

}
}

@ -598,7 +625,7 @@ public class Element extends Base {

@Override
public String primitiveValue() {
if (isPrimitive())
if (isPrimitive() || value != null)
return value;
else {
if (hasPrimitiveValue() && children != null) {

@ -1361,5 +1388,18 @@ public class Element extends Base {
return this;
}

public boolean isIgnorePropertyOrder() {
return ignorePropertyOrder;
}

public void setIgnorePropertyOrder(boolean ignorePropertyOrder) {
this.ignorePropertyOrder = ignorePropertyOrder;
if (children != null) {
for (Element e : children) {
e.setIgnorePropertyOrder(ignorePropertyOrder);
}
}
}

}

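Two of the element-model changes above are behavioural: setType() now returns the element so calls can be chained, and setIgnorePropertyOrder() cascades to all children. A small illustrative sketch, assuming an existing Element e and a Property in scope (both hypothetical here):

    Element child = new Element("value", property).setType("string"); // fluent setType
    e.setIgnorePropertyOrder(true); // recursively flags e and every child so property order is not enforced
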
@ -0,0 +1,585 @@
package org.hl7.fhir.r5.elementmodel;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.hl7.fhir.exceptions.DefinitionException;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.FHIRFormatError;
import org.hl7.fhir.r5.context.IWorkerContext;
import org.hl7.fhir.r5.formats.IParser.OutputStyle;
import org.hl7.fhir.r5.model.ExpressionNode;
import org.hl7.fhir.r5.model.ConceptMap.ConceptMapGroupUnmappedMode;
import org.hl7.fhir.r5.model.Enumerations.ConceptMapRelationship;
import org.hl7.fhir.r5.model.Enumerations.PublicationStatus;
import org.hl7.fhir.r5.model.StructureMap.StructureMapGroupTypeMode;
import org.hl7.fhir.r5.model.StructureMap.StructureMapTransform;
import org.hl7.fhir.r5.utils.FHIRLexer;
import org.hl7.fhir.r5.utils.FHIRLexer.FHIRLexerException;
import org.hl7.fhir.r5.utils.FHIRPathEngine;
import org.hl7.fhir.r5.utils.structuremap.StructureMapUtilities;
import org.hl7.fhir.utilities.SourceLocation;
import org.hl7.fhir.utilities.TextFile;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.utilities.validation.ValidationMessage;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueType;
import org.hl7.fhir.utilities.validation.ValidationMessage.Source;

public class FmlParser extends ParserBase {

private FHIRPathEngine fpe;

public FmlParser(IWorkerContext context) {
super(context);
fpe = new FHIRPathEngine(context);
}

@Override
public List<NamedElement> parse(InputStream stream) throws IOException, FHIRFormatError, DefinitionException, FHIRException {
String text = TextFile.streamToString(stream);
List<NamedElement> result = new ArrayList<>();
result.add(new NamedElement(null, parse(text)));
return result;
}

@Override
public void compose(Element e, OutputStream destination, OutputStyle style, String base)
throws FHIRException, IOException {
throw new Error("Not done yet");
}

public Element parse(String text) throws FHIRException {
FHIRLexer lexer = new FHIRLexer(text, "source", true);
if (lexer.done())
throw lexer.error("Map Input cannot be empty");
Element result = Manager.build(context, context.fetchTypeDefinition("StructureMap"));
try {
if (lexer.hasToken("map")) {
lexer.token("map");
result.makeElement("url").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
lexer.token("=");
result.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("name"));
if (lexer.hasComments()) {
result.makeElement("description").markLocation(lexer.getCurrentLocation()).setValue(lexer.getAllComments());
}
} else {
while (lexer.hasToken("///")) {
lexer.next();
String fid = lexer.takeDottedToken();
Element e = result.makeElement(fid).markLocation(lexer.getCurrentLocation());
lexer.token("=");
e.setValue(lexer.readConstant("meta value"));
}
}
lexer.setMetadataFormat(false);
while (lexer.hasToken("conceptmap"))
parseConceptMap(result, lexer);

while (lexer.hasToken("uses"))
parseUses(result, lexer);
while (lexer.hasToken("imports"))
parseImports(result, lexer);

while (!lexer.done()) {
parseGroup(result, lexer);
}
} catch (FHIRLexerException e) {
logError("2023-02-24", e.getLocation().getLine(), e.getLocation().getColumn(), "??", IssueType.INVALID, e.getMessage(), IssueSeverity.FATAL);
} catch (Exception e) {
logError("2023-02-24", -1, -1, "?", IssueType.INVALID, e.getMessage(), IssueSeverity.FATAL);
}

if (!result.hasChild("status")) {
result.makeElement("status").setValue("draft");
}
result.setIgnorePropertyOrder(true);
return result;
}

private void parseConceptMap(Element result, FHIRLexer lexer) throws FHIRLexerException {
lexer.token("conceptmap");
Element map = Manager.build(context, context.fetchTypeDefinition("ConceptMap"));
Element eid = map.makeElement("id").markLocation(lexer.getCurrentLocation());
String id = lexer.readConstant("map id");
if (id.startsWith("#"))
throw lexer.error("Concept Map identifier must start with #");
eid.setValue(id);
map.makeElement("status").setValue(PublicationStatus.DRAFT.toCode()); // todo: how to add this to the text format
result.makeElement("contained").setElement("resource", map);
lexer.token("{");
// lexer.token("source");
// map.setSource(new UriType(lexer.readConstant("source")));
// lexer.token("target");
// map.setSource(new UriType(lexer.readConstant("target")));
Map<String, String> prefixes = new HashMap<String, String>();
while (lexer.hasToken("prefix")) {
lexer.token("prefix");
String n = lexer.take();
lexer.token("=");
String v = lexer.readConstant("prefix url");
prefixes.put(n, v);
}
while (lexer.hasToken("unmapped")) {
lexer.token("unmapped");
lexer.token("for");
String n = readPrefix(prefixes, lexer);
Element g = getGroupE(map, n, null);
lexer.token("=");
SourceLocation loc = lexer.getCurrentLocation();
String v = lexer.take();
if (v.equals("provided")) {
g.makeElement("unmapped").makeElement("mode").markLocation(loc).setValue(ConceptMapGroupUnmappedMode.USESOURCECODE.toCode());
} else
throw lexer.error("Only unmapped mode PROVIDED is supported at this time");
}
while (!lexer.hasToken("}")) {
String srcs = readPrefix(prefixes, lexer);
lexer.token(":");
SourceLocation scloc = lexer.getCurrentLocation();
String sc = lexer.getCurrent().startsWith("\"") ? lexer.readConstant("code") : lexer.take();
SourceLocation relLoc = lexer.getCurrentLocation();
ConceptMapRelationship rel = readRelationship(lexer);
String tgts = readPrefix(prefixes, lexer);
Element g = getGroupE(map, srcs, tgts);
Element e = g.addElement("element");
e.makeElement("code").markLocation(scloc).setValue(sc.startsWith("\"") ? lexer.processConstant(sc) : sc);
Element tgt = e.addElement("target");
tgt.makeElement("relationship").markLocation(relLoc).setValue(rel.toCode());
lexer.token(":");
tgt.makeElement("code").markLocation(lexer.getCurrentLocation()).setValue(lexer.getCurrent().startsWith("\"") ? lexer.readConstant("code") : lexer.take());
if (lexer.hasComments()) {
tgt.makeElement("comment").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
}
}
lexer.token("}");
}

private Element getGroupE(Element map, String srcs, String tgts) {
for (Element grp : map.getChildrenByName("group")) {
if (grp.getChildValue("source").equals(srcs)) {
Element tgt = grp.getNamedChild("target");
if (tgt == null || tgts == null || tgts.equals(tgt.getValue())) {
if (tgt == null && tgts != null)
grp.makeElement("target").setValue(tgts);
return grp;
}
}
}
Element grp = map.addElement("group");
grp.makeElement("source").setValue(srcs);
grp.makeElement("target").setValue(tgts);
return grp;
}

private String readPrefix(Map<String, String> prefixes, FHIRLexer lexer) throws FHIRLexerException {
String prefix = lexer.take();
if (!prefixes.containsKey(prefix))
throw lexer.error("Unknown prefix '" + prefix + "'");
return prefixes.get(prefix);
}

private ConceptMapRelationship readRelationship(FHIRLexer lexer) throws FHIRLexerException {
String token = lexer.take();
if (token.equals("-"))
return ConceptMapRelationship.RELATEDTO;
if (token.equals("=="))
return ConceptMapRelationship.EQUIVALENT;
if (token.equals("!="))
return ConceptMapRelationship.NOTRELATEDTO;
if (token.equals("<="))
return ConceptMapRelationship.SOURCEISNARROWERTHANTARGET;
if (token.equals(">="))
return ConceptMapRelationship.SOURCEISBROADERTHANTARGET;
throw lexer.error("Unknown relationship token '" + token + "'");
}

private void parseUses(Element result, FHIRLexer lexer) throws FHIRException {
lexer.token("uses");
Element st = result.addElement("structure");
st.makeElement("url").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
if (lexer.hasToken("alias")) {
lexer.token("alias");
st.makeElement("alias").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
lexer.token("as");
st.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
lexer.skipToken(";");
if (lexer.hasComments()) {
st.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
}
}

private void parseImports(Element result, FHIRLexer lexer) throws FHIRException {
lexer.token("imports");
result.addElement("import").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
lexer.skipToken(";");
}

private void parseGroup(Element result, FHIRLexer lexer) throws FHIRException {
SourceLocation commLoc = lexer.getCommentLocation();
String comment = lexer.getAllComments();
lexer.token("group");
Element group = result.addElement("group").markLocation(lexer.getCurrentLocation());
if (!Utilities.noString(comment)) {
group.makeElement("documentation").markLocation(commLoc).setValue(comment);
}
boolean newFmt = false;
if (lexer.hasToken("for")) {
lexer.token("for");
SourceLocation loc = lexer.getCurrentLocation();
if ("type".equals(lexer.getCurrent())) {
lexer.token("type");
lexer.token("+");
lexer.token("types");
group.makeElement("typeMode").markLocation(loc).setValue(StructureMapGroupTypeMode.TYPEANDTYPES.toCode());
} else {
lexer.token("types");
group.makeElement("typeMode").markLocation(loc).setValue(StructureMapGroupTypeMode.TYPES.toCode());
}
}
group.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
if (lexer.hasToken("(")) {
newFmt = true;
lexer.take();
while (!lexer.hasToken(")")) {
parseInput(group, lexer, true);
if (lexer.hasToken(","))
lexer.token(",");
}
lexer.take();
}
if (lexer.hasToken("extends")) {
lexer.next();
group.makeElement("extends").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
if (newFmt) {
if (lexer.hasToken("<")) {
lexer.token("<");
lexer.token("<");
if (lexer.hasToken("types")) {
group.makeElement("typeMode").markLocation(lexer.getCurrentLocation()).setValue(StructureMapGroupTypeMode.TYPES.toCode());
} else {
group.makeElement("typeMode").markLocation(lexer.getCurrentLocation()).setValue(StructureMapGroupTypeMode.TYPEANDTYPES.toCode());
lexer.token("type");
lexer.token("+");
}
lexer.token(">");
lexer.token(">");
}
lexer.token("{");
}
if (newFmt) {
while (!lexer.hasToken("}")) {
if (lexer.done())
throw lexer.error("premature termination expecting 'endgroup'");
parseRule(result, group, lexer, true);
}
} else {
while (lexer.hasToken("input"))
parseInput(group, lexer, false);
while (!lexer.hasToken("endgroup")) {
if (lexer.done())
throw lexer.error("premature termination expecting 'endgroup'");
parseRule(result, group, lexer, false);
}
}
lexer.next();
if (newFmt && lexer.hasToken(";"))
lexer.next();
}

private void parseRule(Element map, Element context, FHIRLexer lexer, boolean newFmt) throws FHIRException {
Element rule = context.addElement("rule").markLocation(lexer.getCurrentLocation());
if (!newFmt) {
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.takeDottedToken());
lexer.token(":");
lexer.token("for");
} else {
if (lexer.hasComments()) {
rule.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
}
}

boolean done = false;
while (!done) {
parseSource(rule, lexer);
done = !lexer.hasToken(",");
if (!done)
lexer.next();
}
if ((newFmt && lexer.hasToken("->")) || (!newFmt && lexer.hasToken("make"))) {
lexer.token(newFmt ? "->" : "make");
done = false;
while (!done) {
parseTarget(rule, lexer);
done = !lexer.hasToken(",");
if (!done)
lexer.next();
}
}
if (lexer.hasToken("then")) {
lexer.token("then");
if (lexer.hasToken("{")) {
lexer.token("{");
while (!lexer.hasToken("}")) {
if (lexer.done())
throw lexer.error("premature termination expecting '}' in nested group");
parseRule(map, rule, lexer, newFmt);
}
lexer.token("}");
} else {
done = false;
while (!done) {
parseRuleReference(rule, lexer);
done = !lexer.hasToken(",");
if (!done)
lexer.next();
}
}
}
if (!rule.hasChild("documentation") && lexer.hasComments()) {
rule.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
}

if (isSimpleSyntax(rule)) {
rule.forceElement("source").makeElement("variable").setValue(StructureMapUtilities.AUTO_VAR_NAME);
rule.forceElement("target").makeElement("variable").setValue(StructureMapUtilities.AUTO_VAR_NAME);
rule.forceElement("target").makeElement("transform").setValue(StructureMapTransform.CREATE.toCode());
Element dep = rule.forceElement("dependent");
dep.makeElement("name").setValue(StructureMapUtilities.DEF_GROUP_NAME);
dep.makeElement("parameter").makeElement("valueId").setValue(StructureMapUtilities.AUTO_VAR_NAME);
dep.makeElement("parameter").makeElement("valueId").setValue(StructureMapUtilities.AUTO_VAR_NAME);
// no dependencies - imply what is to be done based on types
}
if (newFmt) {
if (lexer.isConstant()) {
if (lexer.isStringConstant()) {
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("ruleName"));
} else {
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
} else {
if (rule.getChildrenByName("source").size() != 1 || !rule.getChildrenByName("source").get(0).hasChild("element"))
throw lexer.error("Complex rules must have an explicit name");
if (rule.getChildrenByName("source").get(0).hasChild("type"))
rule.makeElement("name").setValue(rule.getChildrenByName("source").get(0).getNamedChildValue("element") + rule.getChildrenByName("source").get(0).getNamedChildValue("type"));
else
rule.makeElement("name").setValue(rule.getChildrenByName("source").get(0).getNamedChildValue("element"));
}
lexer.token(";");
}
}

private void parseRuleReference(Element rule, FHIRLexer lexer) throws FHIRLexerException {
Element ref = rule.addElement("dependent");
ref.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
lexer.token("(");
boolean done = false;
while (!done) {
parseParameter(ref, lexer);
done = !lexer.hasToken(",");
if (!done)
lexer.next();
}
lexer.token(")");
}

private void parseSource(Element rule, FHIRLexer lexer) throws FHIRException {
Element source = rule.addElement("source").markLocation(lexer.getCurrentLocation());
source.makeElement("context").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
if (source.getChildValue("context").equals("search") && lexer.hasToken("(")) {
source.makeElement("context").markLocation(lexer.getCurrentLocation()).setValue("@search");
lexer.take();
SourceLocation loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
source.setUserData(StructureMapUtilities.MAP_SEARCH_EXPRESSION, node);
source.makeElement("element").markLocation(loc).setValue(node.toString());
lexer.token(")");
} else if (lexer.hasToken(".")) {
lexer.token(".");
source.makeElement("element").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
if (lexer.hasToken(":")) {
// type and cardinality
lexer.token(":");
source.makeElement("type").markLocation(lexer.getCurrentLocation()).setValue(lexer.takeDottedToken());
}
if (Utilities.isInteger(lexer.getCurrent())) {
source.makeElement("min").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
lexer.token("..");
source.makeElement("max").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
if (lexer.hasToken("default")) {
lexer.token("default");
source.makeElement("defaultValue").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("default value"));
}
if (Utilities.existsInList(lexer.getCurrent(), "first", "last", "not_first", "not_last", "only_one")) {
source.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}

if (lexer.hasToken("as")) {
lexer.take();
source.makeElement("variable").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
if (lexer.hasToken("where")) {
lexer.take();
SourceLocation loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
source.setUserData(StructureMapUtilities.MAP_WHERE_EXPRESSION, node);
source.makeElement("condition").markLocation(loc).setValue(node.toString());
}
if (lexer.hasToken("check")) {
lexer.take();
SourceLocation loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
source.setUserData(StructureMapUtilities.MAP_WHERE_CHECK, node);
source.makeElement("check").markLocation(loc).setValue(node.toString());
}
if (lexer.hasToken("log")) {
lexer.take();
SourceLocation loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
source.setUserData(StructureMapUtilities.MAP_WHERE_CHECK, node);
source.makeElement("logMessage").markLocation(loc).setValue(lexer.take());
}
}

private void parseTarget(Element rule, FHIRLexer lexer) throws FHIRException {
Element target = rule.addElement("target").markLocation(lexer.getCurrentLocation());
SourceLocation loc = lexer.getCurrentLocation();
String start = lexer.take();
if (lexer.hasToken(".")) {
target.makeElement("context").markLocation(loc).setValue(start);
start = null;
lexer.token(".");
target.makeElement("element").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
String name;
boolean isConstant = false;
if (lexer.hasToken("=")) {
if (start != null) {
target.makeElement("context").markLocation(loc).setValue(start);
}
lexer.token("=");
isConstant = lexer.isConstant();
loc = lexer.getCurrentLocation();
name = lexer.take();
} else {
loc = lexer.getCurrentLocation();
name = start;
}

if ("(".equals(name)) {
// inline fluentpath expression
target.makeElement("transform").markLocation(lexer.getCurrentLocation()).setValue(StructureMapTransform.EVALUATE.toCode());
loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
target.setUserData(StructureMapUtilities.MAP_EXPRESSION, node);
target.addElement("parameter").markLocation(loc).makeElement("valueString").setValue(node.toString());
lexer.token(")");
} else if (lexer.hasToken("(")) {
target.makeElement("transform").markLocation(loc).setValue(name);
lexer.token("(");
if (target.getChildValue("transform").equals(StructureMapTransform.EVALUATE.toCode())) {
parseParameter(target, lexer);
lexer.token(",");
loc = lexer.getCurrentLocation();
ExpressionNode node = fpe.parse(lexer);
target.setUserData(StructureMapUtilities.MAP_EXPRESSION, node);
target.addElement("parameter").markLocation(loc).setValue(node.toString());
} else {
while (!lexer.hasToken(")")) {
parseParameter(target, lexer);
if (!lexer.hasToken(")"))
lexer.token(",");
}
}
lexer.token(")");
} else if (name != null) {
target.makeElement("transform").markLocation(loc).setValue(StructureMapTransform.COPY.toCode());
if (!isConstant) {
loc = lexer.getCurrentLocation();
String id = name;
while (lexer.hasToken(".")) {
id = id + lexer.take() + lexer.take();
}
target.addElement("parameter").markLocation(loc).setValue(id);
} else {
target.addElement("parameter").markLocation(lexer.getCurrentLocation()).setValue(readConstant(name, lexer));
}
}
if (lexer.hasToken("as")) {
lexer.take();
target.makeElement("variable").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
while (Utilities.existsInList(lexer.getCurrent(), "first", "last", "share", "collate")) {
if (lexer.getCurrent().equals("share")) {
target.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
target.makeElement("listRuleId").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
} else {
target.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
}
}

private void parseParameter(Element ref, FHIRLexer lexer) throws FHIRLexerException, FHIRFormatError {
if (!lexer.isConstant()) {
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).makeElement("valueId").setValue(lexer.take());
} else if (lexer.isStringConstant())
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).makeElement("valueString").setValue(lexer.readConstant("??"));
else {
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).makeElement("valueString").setValue(readConstant(lexer.take(), lexer));
}
}

private void parseInput(Element group, FHIRLexer lexer, boolean newFmt) throws FHIRException {
Element input = group.addElement("input");
if (newFmt) {
input.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
} else
lexer.token("input");
input.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
if (lexer.hasToken(":")) {
lexer.token(":");
input.makeElement("type").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
}
if (!newFmt) {
lexer.token("as");
input.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
if (lexer.hasComments()) {
input.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
}
lexer.skipToken(";");
}
}

private boolean isSimpleSyntax(Element rule) {
return
(rule.getChildren("source").size() == 1 && rule.getChildren("source").get(0).hasChild("context") && rule.getChildren("source").get(0).hasChild("element") && !rule.getChildren("source").get(0).hasChild("variable")) &&
(rule.getChildren("target").size() == 1 && rule.getChildren("target").get(0).hasChild("context") && rule.getChildren("target").get(0).hasChild("element") && !rule.getChildren("target").get(0).hasChild("variable") &&
!rule.getChildren("target").get(0).hasChild("parameter")) &&
(rule.getChildren("dependent").size() == 0 && rule.getChildren("rule").size() == 0);
}

private String readConstant(String s, FHIRLexer lexer) throws FHIRLexerException {
if (Utilities.isInteger(s))
return s;
else if (Utilities.isDecimal(s, false))
return s;
else if (Utilities.existsInList(s, "true", "false"))
return s;
else
return lexer.processConstant(s);
}

}

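A brief usage sketch for the new parser; context is any IWorkerContext and fmlText is FHIR Mapping Language source held in a String (both assumed to exist here):

    FmlParser fml = new FmlParser(context);
    Element structureMap = fml.parse(fmlText); // element-model StructureMap
    // lexer/parse errors are reported through logError(); 'status' defaults to 'draft' if the map does not set one
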
@ -48,7 +48,7 @@ import org.hl7.fhir.r5.model.StructureDefinition;
public class Manager {

//TODO use EnumMap
public enum FhirFormat { XML, JSON, TURTLE, TEXT, VBAR, SHC;
public enum FhirFormat { XML, JSON, TURTLE, TEXT, VBAR, SHC, FML;
// SHC = smart health cards, including as text versions of QR codes

public String getExtension() {

@ -65,6 +65,8 @@ public class Manager {
return "hl7";
case SHC:
return "shc";
case FML:
return "fml";
}
return null;
}

@ -81,6 +83,8 @@ public class Manager {
return TEXT;
case "hl7":
return VBAR;
case "fml":
return FML;
}
return null;
}

@ -115,6 +119,7 @@ public class Manager {
case TURTLE : return new TurtleParser(context);
case VBAR : return new VerticalBarParser(context);
case SHC : return new SHCParser(context);
case FML : return new FmlParser(context);
case TEXT : throw new Error("Programming logic error: do not call makeParser for a text resource");
}
return null;

@ -123,6 +128,7 @@ public class Manager {
public static Element build(IWorkerContext context, StructureDefinition sd) {
Property p = new Property(context, sd.getSnapshot().getElementFirstRep(), sd);
Element e = new Element(p.getName(), p);
e.setPath(sd.getType());
return e;
}

@ -270,9 +270,10 @@ public class Property {
String tc = definition.getType().get(0).getCode();
return definition.getType().size() == 1 && (("Resource".equals(tc) || "DomainResource".equals(tc)) || Utilities.existsInList(tc, context.getResourceNames()));
}
else
else {
return !definition.getPath().contains(".") && (structure.getKind() == StructureDefinitionKind.RESOURCE);
}
}

public boolean isList() {
return !"1".equals(definition.getMax());

@ -333,14 +333,12 @@ public class TurtleParser extends ParserBase {
}
String subjId = genSubjectId(e);

String ontologyId = subjId.replace(">", ".ttl>");
Section ontology = ttl.section("ontology header");
ontology.triple(ontologyId, "a", "owl:Ontology");
ontology.triple(ontologyId, "owl:imports", "fhir:fhir.ttl");
if(ontologyId.startsWith("<" + FHIR_URI_BASE))
ontology.triple(ontologyId, "owl:versionIRI", ontologyId.replace(FHIR_URI_BASE, FHIR_VERSION_BASE));
Subject subject;
if (hasModifierExtension(e))
subject = section.triple(subjId, "a", "fhir:_" + e.getType());
else
subject = section.triple(subjId, "a", "fhir:" + e.getType());

Subject subject = section.triple(subjId, "a", "fhir:" + e.getType());
subject.linkedPredicate("fhir:nodeRole", "fhir:treeRoot", linkResolver == null ? null : linkResolver.resolvePage("rdf.html#tree-root"), null);

for (Element child : e.getChildren()) {

@ -349,6 +347,10 @@ public class TurtleParser extends ParserBase {

}

private boolean hasModifierExtension(Element e) {
return e.getChildren().stream().anyMatch(p -> p.getName().equals("modifierExtension"));
}

protected String getURIType(String uri) {
if(uri.startsWith("<" + FHIR_URI_BASE))
if(uri.substring(FHIR_URI_BASE.length() + 1).contains("/"))

@ -415,17 +417,18 @@ public class TurtleParser extends ParserBase {
Complex t;
if (element.getSpecial() == SpecialElement.BUNDLE_ENTRY && parent != null && parent.getNamedChildValue("fullUrl") != null) {
String url = "<"+parent.getNamedChildValue("fullUrl")+">";
ctxt.linkedPredicate("fhir:"+en, url, linkResolver == null ? null : linkResolver.resolveProperty(element.getProperty()), comment);
ctxt.linkedPredicate("fhir:"+en, url, linkResolver == null ? null : linkResolver.resolveProperty(element.getProperty()), comment, element.getProperty().isList());
t = section.subject(url);
} else {
t = ctxt.linkedPredicate("fhir:"+en, linkResolver == null ? null : linkResolver.resolveProperty(element.getProperty()), comment);
t = ctxt.linkedPredicate("fhir:"+en, linkResolver == null ? null : linkResolver.resolveProperty(element.getProperty()), comment, element.getProperty().isList());
}
if (element.getProperty().getName().endsWith("[x]") && !element.hasValue()) {
t.linkedPredicate("a", "fhir:" + element.fhirType(), linkResolver == null ? null : linkResolver.resolveType(element.fhirType()), null);
}
if (element.getSpecial() != null)
t.linkedPredicate("a", "fhir:"+element.fhirType(), linkResolver == null ? null : linkResolver.resolveType(element.fhirType()), null);
if (element.hasValue())
t.linkedPredicate("fhir:value", ttlLiteral(element.getValue(), element.getType()), linkResolver == null ? null : linkResolver.resolveType(element.getType()), null);
if (element.getProperty().isList() && (!element.isResource() || element.getSpecial() == SpecialElement.CONTAINED))
t.linkedPredicate("fhir:index", Integer.toString(element.getIndex()), linkResolver == null ? null : linkResolver.resolvePage("rdf.html#index"), null);
t.linkedPredicate("fhir:v", ttlLiteral(element.getValue(), element.getType()), linkResolver == null ? null : linkResolver.resolveType(element.getType()), null);

if ("Coding".equals(element.getType()))
decorateCoding(t, element, section);

@ -463,37 +466,27 @@ public class TurtleParser extends ParserBase {

private String getFormalName(Element element) {
String en = null;
if (element.getSpecial() == null) {
if (element.getProperty().getDefinition().hasBase())
en = element.getProperty().getDefinition().getBase().getPath();
}
if (element.getSpecial() == null)
en = element.getProperty().getName();
else if (element.getSpecial() == SpecialElement.BUNDLE_ENTRY)
en = "Bundle.entry.resource";
en = "resource";
else if (element.getSpecial() == SpecialElement.BUNDLE_OUTCOME)
en = "Bundle.entry.response.outcome";
en = "outcome";
else if (element.getSpecial() == SpecialElement.PARAMETER)
en = element.getElementProperty().getDefinition().getPath();
else // CONTAINED
en = "DomainResource.contained";
en = "contained";

if (en == null)
en = element.getProperty().getDefinition().getPath();
boolean doType = false;
if (en.endsWith("[x]")) {
en = en.substring(0, en.length()-3);
doType = true;
}
if (doType || (element.getProperty().getDefinition().getType().size() > 1 && !allReference(element.getProperty().getDefinition().getType())))
en = en + Utilities.capitalize(element.getType());
return en;
}
en = element.getProperty().getName();

private boolean allReference(List<TypeRefComponent> types) {
for (TypeRefComponent t : types) {
if (!t.getCode().equals("Reference"))
return false;
}
return true;
if (en.endsWith("[x]"))
en = en.substring(0, en.length()-3);

if (hasModifierExtension(element))
return "_" + en;
else
return en;
}

static public String ttlLiteral(String value, String type) {

@ -550,10 +543,14 @@ public class TurtleParser extends ParserBase {
else
t.linkedPredicate("a", "sct:" + urlescape(code), null, null);
} else if ("http://loinc.org".equals(system)) {
t.prefix("loinc", "http://loinc.org/rdf#");
t.prefix("loinc", "https://loinc.org/rdf/");
t.linkedPredicate("a", "loinc:"+urlescape(code).toUpperCase(), null, null);
} else if ("https://www.nlm.nih.gov/mesh".equals(system)) {
t.prefix("mesh", "http://id.nlm.nih.gov/mesh/");
t.linkedPredicate("a", "mesh:"+urlescape(code), null, null);
}
}

private void generateLinkedPredicate(Complex t, String code) throws FHIRException {
Expression expression = SnomedExpressions.parse(code);

@ -81,6 +81,10 @@ public abstract class FormatUtilities {
public static final String NS_XSI = "http://www.w3.org/2001/XMLSchema-instance";
private static final int MAX_SCAN_LENGTH = 1000; // how many characters to scan into content when autodetermining format

public static final String MAP_ATTRIBUTE_NAME = "mapAttribute";
public static final String PROPERTY_NAME = "property";
public static String WORKING_CM_PROP_NAME = PROPERTY_NAME;

protected String toString(String value) {
return value;
}

@ -9418,10 +9418,10 @@ public class JsonParser extends JsonParserBase {

protected void parseConceptMapOtherElementComponentProperties(JsonObject json, ConceptMap.OtherElementComponent res) throws IOException, FHIRFormatError {
parseBackboneElementProperties(json, res);
if (json.has("property"))
res.setPropertyElement(parseUri(json.get("property").getAsString()));
if (json.has("_property"))
parseElementProperties(getJObject(json, "_property"), res.getPropertyElement());
if (json.has(FormatUtilities.WORKING_CM_PROP_NAME))
res.setPropertyElement(parseUri(json.get(FormatUtilities.WORKING_CM_PROP_NAME).getAsString()));
if (json.has("_"+FormatUtilities.WORKING_CM_PROP_NAME))
parseElementProperties(getJObject(json, "_"+FormatUtilities.WORKING_CM_PROP_NAME), res.getPropertyElement());
DataType value = parseType("value", json);
if (value != null)
res.setValue(value);

@ -44920,8 +44920,8 @@ public class JsonParser extends JsonParserBase {
protected void composeOtherElementComponentProperties(ConceptMap.OtherElementComponent element) throws IOException {
composeBackboneElementProperties(element);
if (element.hasPropertyElement()) {
composeUriCore("property", element.getPropertyElement(), false);
composeUriExtras("property", element.getPropertyElement(), false);
composeUriCore(FormatUtilities.WORKING_CM_PROP_NAME, element.getPropertyElement(), false);
composeUriExtras(FormatUtilities.WORKING_CM_PROP_NAME, element.getPropertyElement(), false);
}
if (element.hasValue()) {
composeType("value", element.getValue());

File diff suppressed because it is too large

@ -122,10 +122,10 @@ public abstract class RdfParserBase extends ParserBase implements IParser {
return;
if ("http://snomed.info/sct".equals(element.getSystem())) {
t.prefix("sct", "http://snomed.info/sct/");
t.predicate("a", "sct:"+element.getCode());
t.predicate("a", "sct:"+element.getCode(), false);
} else if ("http://snomed.info/sct".equals(element.getSystem())) {
t.prefix("loinc", "http://loinc.org/rdf#");
t.predicate("a", "loinc:"+element.getCode());
t.predicate("a", "loinc:"+element.getCode(), false);
}
}

@ -8204,7 +8204,7 @@ public class XmlParser extends XmlParserBase {
}

protected boolean parseConceptMapOtherElementComponentContent(int eventType, XmlPullParser xpp, ConceptMap.OtherElementComponent res) throws XmlPullParserException, IOException, FHIRFormatError {
if (eventType == XmlPullParser.START_TAG && xpp.getName().equals("property")) {
if (eventType == XmlPullParser.START_TAG && xpp.getName().equals(FormatUtilities.WORKING_CM_PROP_NAME)) {
res.setPropertyElement(parseUri(xpp));
} else if (eventType == XmlPullParser.START_TAG && nameIsTypeName(xpp, "value")) {
res.setValue(parseType("value", xpp));

@ -38972,7 +38972,7 @@ public class XmlParser extends XmlParserBase {
protected void composeConceptMapOtherElementComponentElements(ConceptMap.OtherElementComponent element) throws IOException {
composeBackboneElementElements(element);
if (element.hasPropertyElement()) {
composeUri("property", element.getPropertyElement());
composeUri(FormatUtilities.WORKING_CM_PROP_NAME, element.getPropertyElement());
}
if (element.hasValue()) {
composeType("value", element.getValue());

@ -34,7 +34,11 @@ package org.hl7.fhir.r5.model;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;

import org.hl7.fhir.r5.model.Enumerations.*;
import org.hl7.fhir.r5.utils.ToolingExtensions;
import org.hl7.fhir.utilities.StandardsStatus;
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.ICompositeType;

@ -509,6 +513,15 @@ public void checkNoModifiers(String noun, String verb) throws FHIRException {
}
return Collections.unmodifiableList(retVal);
}

public StandardsStatus getStandardsStatus() {
return ToolingExtensions.getStandardsStatus(this);
}

public void setStandardsStatus(StandardsStatus status) {
ToolingExtensions.setStandardsStatus(this, status, null);
}

// end addition

}

@ -34,8 +34,11 @@ package org.hl7.fhir.r5.model;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.hl7.fhir.utilities.StandardsStatus;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.r5.model.Enumerations.*;
import org.hl7.fhir.r5.utils.ToolingExtensions;
import org.hl7.fhir.instance.model.api.IBaseDatatypeElement;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.ICompositeType;

@ -430,6 +433,15 @@ public abstract class Element extends Base implements IBaseHasExtensions, IBaseE
return ext.get(0).getValue().primitiveValue();
}

public StandardsStatus getStandardsStatus() {
return ToolingExtensions.getStandardsStatus(this);
}

public void setStandardsStatus(StandardsStatus status) {
ToolingExtensions.setStandardsStatus(this, status, null);
}

// end addition

}

@ -13521,6 +13521,9 @@ If a pattern[x] is declared on a repeating element, the pattern applies to all r
return hasPath() ? getPath().contains(".") ? getPath().substring(getPath().lastIndexOf(".")+1) : getPath() : null;
}

public String getNameBase() {
return getName().replace("[x]", "");
}
public boolean unbounded() {
return getMax().equals("*") || Integer.parseInt(getMax()) > 1;
}

@ -13554,6 +13557,8 @@ If a pattern[x] is declared on a repeating element, the pattern applies to all r
return getMin() == 1;
}

// end addition

}

@ -115,10 +115,13 @@ public class TypeDetails {
public boolean isSystemType() {
return uri.startsWith(FP_NS);
}

}

private List<ProfiledType> types = new ArrayList<ProfiledType>();
private CollectionStatus collectionStatus;
private Set<String> targets; // or, not and, canonical urls

public TypeDetails(CollectionStatus collectionStatus, String... names) {
super();
this.collectionStatus = collectionStatus;

@ -151,6 +154,7 @@ public class TypeDetails {
addType(pt);
return res;
}

public void addType(ProfiledType pt) {
for (ProfiledType et : types) {
if (et.uri.equals(pt.uri)) {

@ -176,6 +180,28 @@ public class TypeDetails {
types.add(pt);
}

public void addType(CollectionStatus status, ProfiledType pt) {
addType(pt);
if (collectionStatus == null) {
collectionStatus = status;
} else {
switch (status) {
case ORDERED:
if (collectionStatus == CollectionStatus.SINGLETON) {
collectionStatus = status;
}
break;
case SINGLETON:
break;
case UNORDERED:
collectionStatus = status;
break;
default:
break;
}
}
}

public void addTypes(Collection<String> names) {
for (String n : names)
addType(new ProfiledType(n));

@ -245,7 +271,14 @@ public class TypeDetails {
collectionStatus = source.collectionStatus;
else
collectionStatus = CollectionStatus.ORDERED;
if (source.targets != null) {
if (targets == null) {
targets = new HashSet<>();
}
targets.addAll(source.targets);
}
}

public TypeDetails union(TypeDetails right) {
TypeDetails result = new TypeDetails(null);
if (right.collectionStatus == CollectionStatus.UNORDERED || collectionStatus == CollectionStatus.UNORDERED)

@ -256,6 +289,16 @@ public class TypeDetails {
result.addType(pt);
for (ProfiledType pt : right.types)
result.addType(pt);
if (targets != null || right.targets != null) {
result.targets = new HashSet<>();
if (targets != null) {
result.targets.addAll(targets);
}
if (right.targets != null) {
result.targets.addAll(right.targets);
}
}

return result;
}

@ -274,6 +317,15 @@ public class TypeDetails {
}
for (ProfiledType pt : right.types)
result.addType(pt);
if (targets != null && right.targets != null) {
result.targets = new HashSet<>();
for (String s : targets) {
if (right.targets.contains(s)) {
result.targets.add(s);
}
}
}

return result;
}

@ -360,4 +412,30 @@ public class TypeDetails {
}

public void addTarget(String url) {
if (targets == null) {
targets = new HashSet<>();
}
targets.add(url);
}
public Set<String> getTargets() {
return targets;
}
public boolean typesHaveTargets() {
for (ProfiledType pt : types) {
if (Utilities.existsInList(pt.getUri(), "Reference", "CodeableReference", "canonical", "http://hl7.org/fhir/StructureDefinition/Reference", "http://hl7.org/fhir/StructureDefinition/CodeableReference", "http://hl7.org/fhir/StructureDefinition/canonical")) {
return true;
}
}
return false;
}
public void addTargets(Set<String> src) {
if (src != null) {
for (String s : src) {
addTarget(s);
}
}
}

}

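A small sketch of the new target tracking in TypeDetails (the target URL is illustrative):

    TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, "Reference");
    td.addTarget("http://hl7.org/fhir/StructureDefinition/Patient");
    boolean refWithTargets = td.typesHaveTargets(); // true: 'Reference' is one of the target-bearing types
    Set<String> targets = td.getTargets();          // the recorded target canonicals, or null if none were added
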
@ -212,7 +212,7 @@ public class ConceptMapRenderer extends TerminologyRenderer {
} else
tr.td().b().tx("Code");
for (String s : sources.keySet()) {
if (!s.equals("code")) {
if (s != null && !s.equals("code")) {
if (sources.get(s).size() == 1) {
String url = sources.get(s).iterator().next();
renderCSDetailsLink(tr, url, false);

@ -229,7 +229,7 @@ public class ConceptMapRenderer extends TerminologyRenderer {
} else
tr.td().b().tx("Code");
for (String s : targets.keySet()) {
if (!s.equals("code")) {
if (s != null && !s.equals("code")) {
if (targets.get(s).size() == 1) {
String url = targets.get(s).iterator().next();
renderCSDetailsLink(tr, url, false);

@ -291,7 +291,7 @@ public class ConceptMapRenderer extends TerminologyRenderer {
td.style("border-top-style: none; border-left-width: 0px");
}
for (String s : sources.keySet()) {
if (!s.equals("code")) {
if (s != null && !s.equals("code")) {
td = tr.td();
if (first) {
td.addText(getValue(ccm.getDependsOn(), s, sources.get(s).size() != 1));

@ -323,7 +323,7 @@ public class ConceptMapRenderer extends TerminologyRenderer {
tr.td().style("border-left-width: 0px").tx(display == null ? "" : display);

for (String s : targets.keySet()) {
if (!s.equals("code")) {
if (s != null && !s.equals("code")) {
td = tr.td();
td.addText(getValue(ccm.getProduct(), s, targets.get(s).size() != 1));
display = getDisplay(ccm.getProduct(), s);

@ -125,8 +125,12 @@ public class DiagnosticReportRenderer extends ResourceRenderer {

pw = getProperty(dr, "conclusion");
if (valued(pw)) {
if (pw.fhirType().equals("markdown")) {
render(x, pw.value());
} else {
render(x.para(), pw.value());
}
}

pw = getProperty(dr, "conclusionCode");
if (!valued(pw)) {

@ -223,12 +223,15 @@ public abstract class ResourceRenderer extends DataRenderer {
tr = resolveReference(rw, r.getReference());

if (!r.getReference().startsWith("#")) {
if (tr != null && tr.getReference() != null)
if (tr != null && tr.getReference() != null) {
c = x.ah(tr.getReference());
else
c = x.ah(r.getReference());
} else if (r.getReference().contains("?")) {
x.tx("Conditional Reference: ");
c = x.code("");
} else {
c = x.ah(r.getReference());
}
} else {

c = x.ah(r.getReference());
}
} else {

@ -77,6 +77,7 @@ import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Cell;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Piece;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Row;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableModel;
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.Title;
import org.hl7.fhir.utilities.xhtml.NodeType;

@ -324,7 +325,7 @@ public class StructureDefinitionRenderer extends ResourceRenderer {
model = initCustomTable(gen, corePath, false, true, profile.getId()+(diff ? "d" : "s"), rc.getRules() == GenerationRules.IG_PUBLISHER, columns);
break;
case SUMMARY:
model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), rc.getRules() == GenerationRules.IG_PUBLISHER);
model = gen.initNormalTable(corePath, false, true, profile.getId()+(diff ? "d" : "s"), rc.getRules() == GenerationRules.IG_PUBLISHER, rc.getRules() == GenerationRules.IG_PUBLISHER ? TableGenerationMode.XHTML : TableGenerationMode.XML);
break;
default:
throw new Error("Unknown structure mode");

@ -2052,7 +2053,7 @@ public class StructureDefinitionRenderer extends ResourceRenderer {
} else if (definition.hasExample()) {
for (ElementDefinitionExampleComponent ex : definition.getExample()) {
if (!c.getPieces().isEmpty()) { c.addPiece(gen.new Piece("br")); }
c.getPieces().add(checkForNoChange(ex, gen.new Piece(null, "Example'"+("".equals("General")? "" : " "+ex.getLabel()+"'")+": ", null).addStyle("font-weight:bold")));
c.getPieces().add(checkForNoChange(ex, gen.new Piece(null, "Example'"+("".equals("General")? "": " "+ex.getLabel()+"'")+": ", "").addStyle("font-weight:bold")));
c.getPieces().add(checkForNoChange(ex, gen.new Piece(null, buildJson(ex.getValue()), null).addStyle("color: darkgreen")));
}
}

@ -2667,7 +2668,7 @@ public class StructureDefinitionRenderer extends ResourceRenderer {
public XhtmlNode generateExtensionTable(String defFile, StructureDefinition ed, String imageFolder, boolean inlineGraphics, boolean full, String corePath, String imagePath, Set<String> outputTracker, RenderingContext rc) throws IOException, FHIRException {
HierarchicalTableGenerator gen = new HierarchicalTableGenerator(imageFolder, inlineGraphics, true);
gen.setTranslator(getTranslator());
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId()+(full ? "f" : "n"), true);
TableModel model = gen.initNormalTable(corePath, false, true, ed.getId()+(full ? "f" : "n"), true, TableGenerationMode.XHTML);

boolean deep = false;
String m = "";

@ -52,4 +52,14 @@ public class TestPackageLoader implements IContextResourceLoader {
return new ArrayList<>();
}

@Override
public void setPatchUrls(boolean value) {

}

@Override
public String patchUrl(String url, String resourceType) {
return url;
}

}

@ -89,6 +89,7 @@ public class FHIRLexer {
|
|||
private String name;
|
||||
private boolean liquidMode; // in liquid mode, || terminates the expression and hands the parser back to the host
|
||||
private SourceLocation commentLocation;
|
||||
private boolean metadataFormat;
|
||||
|
||||
public FHIRLexer(String source, String name) throws FHIRLexerException {
|
||||
this.source = source == null ? "" : source;
|
||||
|
@ -102,6 +103,13 @@ public class FHIRLexer {
|
|||
currentLocation = new SourceLocation(1, 1);
|
||||
next();
|
||||
}
|
||||
public FHIRLexer(String source, String name, boolean metadataFormat) throws FHIRLexerException {
|
||||
this.source = source == null ? "" : source;
|
||||
this.name = name == null ? "??" : name;
|
||||
this.metadataFormat = metadataFormat;
|
||||
currentLocation = new SourceLocation(1, 1);
|
||||
next();
|
||||
}
|
||||
public String getCurrent() {
|
||||
return current;
|
||||
}
|
||||
|
@ -211,10 +219,13 @@ public class FHIRLexer {
|
|||
} else if (ch == '/') {
|
||||
cursor++;
|
||||
if (cursor < source.length() && (source.charAt(cursor) == '/')) {
|
||||
// this is en error - should already have been skipped
|
||||
error("This shouldn't happen?");
|
||||
}
|
||||
// we've run into metadata
|
||||
cursor++;
|
||||
cursor++;
|
||||
current = source.substring(currentStart, cursor);
|
||||
} else {
|
||||
current = source.substring(currentStart, cursor);
|
||||
}
|
||||
} else if (ch == '$') {
|
||||
cursor++;
|
||||
while (cursor < source.length() && (source.charAt(cursor) >= 'a' && source.charAt(cursor) <= 'z'))
|
||||
|
@ -309,7 +320,7 @@ public class FHIRLexer {
|
|||
boolean last13 = false;
|
||||
boolean done = false;
|
||||
while (cursor < source.length() && !done) {
|
||||
if (cursor < source.length() -1 && "//".equals(source.substring(cursor, cursor+2))) {
|
||||
if (cursor < source.length() -1 && "//".equals(source.substring(cursor, cursor+2)) && !isMetadataStart()) {
|
||||
commentLocation = currentLocation;
|
||||
int start = cursor+2;
|
||||
while (cursor < source.length() && !((source.charAt(cursor) == '\r') || source.charAt(cursor) == '\n')) {
|
||||
|
@ -338,6 +349,10 @@ public class FHIRLexer {
|
|||
}
|
||||
}
|
||||
|
||||
private boolean isMetadataStart() {
|
||||
return metadataFormat && cursor < source.length() - 2 && "///".equals(source.substring(cursor, cursor+3));
|
||||
}
|
||||
|
||||
private boolean isDateChar(char ch,int start) {
|
||||
int eot = source.charAt(start+1) == 'T' ? 10 : 20;
|
||||
|
||||
|
@ -550,5 +565,11 @@ public class FHIRLexer {
|
|||
public SourceLocation getCommentLocation() {
|
||||
return this.commentLocation;
|
||||
}
|
||||
public boolean isMetadataFormat() {
|
||||
return metadataFormat;
|
||||
}
|
||||
public void setMetadataFormat(boolean metadataFormat) {
|
||||
this.metadataFormat = metadataFormat;
|
||||
}
|
||||
|
||||
}
|
|
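The lexer change above introduces a metadataFormat mode in which a line beginning with /// is handed back as a token instead of being swallowed as an ordinary // comment, so a parser can read structured metadata. A minimal usage sketch follows; it is not part of this diff, and the sample source string is invented for illustration:

// Sketch only (not from this commit); assumes FHIRLexer from this codebase is on the classpath.
String src = "/// url = \"http://example.org/demo\"";       // invented input

FHIRLexer plain = new FHIRLexer(src, "demo.map");            // default mode: "///" is skipped like any // comment
FHIRLexer meta  = new FHIRLexer(src, "demo.map", true);      // metadataFormat: isMetadataStart() lets "///" through
                                                             // as a token so the caller can read the metadata line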
@ -30,6 +30,7 @@ import org.hl7.fhir.r5.context.IWorkerContext.ValidationResult;
|
|||
import org.hl7.fhir.r5.model.Base;
|
||||
import org.hl7.fhir.r5.model.BaseDateTimeType;
|
||||
import org.hl7.fhir.r5.model.BooleanType;
|
||||
import org.hl7.fhir.r5.model.CanonicalType;
|
||||
import org.hl7.fhir.r5.model.CodeableConcept;
|
||||
import org.hl7.fhir.r5.model.Constants;
|
||||
import org.hl7.fhir.r5.model.DateTimeType;
|
||||
|
@ -386,6 +387,7 @@ public class FHIRPathEngine {
|
|||
}
|
||||
}
|
||||
initFlags();
|
||||
cu = new ContextUtilities(worker);
|
||||
}
|
||||
|
||||
private void initFlags() {
|
||||
|
@ -578,6 +580,23 @@ public class FHIRPathEngine {
|
|||
* @throws FHIRLexerException if the path is not valid
|
||||
*/
|
||||
public TypeDetails check(Object appContext, String resourceType, String context, ExpressionNode expr) throws FHIRLexerException, PathEngineException, DefinitionException {
|
||||
return check(appContext, resourceType, context, expr, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* check that paths referred to in the ExpressionNode are valid
|
||||
*
|
||||
* xPathStartsWithValueRef is a hacky workaround for the fact that FHIR Path sometimes needs a different starting point than the xpath
|
||||
*
|
||||
* returns a list of the possible types that might be returned by executing the ExpressionNode against a particular context
|
||||
*
|
||||
* @param context - the logical type against which this path is applied
|
||||
* @throws DefinitionException
|
||||
* @throws PathEngineException
|
||||
* @throws FHIRLexerException if the path is not valid
|
||||
*/
|
||||
public TypeDetails check(Object appContext, String resourceType, String context, ExpressionNode expr, Set<ElementDefinition> elementDependencies) throws FHIRLexerException, PathEngineException, DefinitionException {
|
||||
|
||||
// if context is a path that refers to a type, do that conversion now
|
||||
TypeDetails types;
|
||||
if (context == null) {
|
||||
|
@ -610,7 +629,33 @@ public class FHIRPathEngine {
|
|||
}
|
||||
}
|
||||
|
||||
return executeType(new ExecutionTypeContext(appContext, resourceType, types, types), types, expr, true);
|
||||
return executeType(new ExecutionTypeContext(appContext, resourceType, types, types), types, expr, elementDependencies, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* check that paths referred to in the ExpressionNode are valid
|
||||
*
|
||||
* xPathStartsWithValueRef is a hacky workaround for the fact that FHIR Path sometimes needs a different starting point than the xpath
|
||||
*
|
||||
* returns a list of the possible types that might be returned by executing the ExpressionNode against a particular context
|
||||
*
|
||||
* @throws DefinitionException
|
||||
* @throws PathEngineException
|
||||
* @throws FHIRLexerException if the path is not valid
|
||||
*/
|
||||
public TypeDetails check(Object appContext, String resourceType, List<String> resourceTypes, ExpressionNode expr, Set<ElementDefinition> elementDependencies) throws FHIRLexerException, PathEngineException, DefinitionException {
|
||||
|
||||
// if context is a path that refers to a type, do that conversion now
|
||||
TypeDetails types = null;
|
||||
for (String rt : resourceTypes) {
|
||||
if (types == null) {
|
||||
types = new TypeDetails(CollectionStatus.SINGLETON, rt);
|
||||
} else {
|
||||
types.addType(rt);
|
||||
}
|
||||
}
|
||||
|
||||
return executeType(new ExecutionTypeContext(appContext, resourceType, types, types), types, expr, elementDependencies, true);
|
||||
}
|
||||
|
||||
private FHIRException makeExceptionPlural(Integer num, ExpressionNode holder, String constName, Object... args) {
|
||||
|
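To illustrate the new check overloads above (a sketch under assumptions, not code from this commit): the extra Set<ElementDefinition> argument is filled with the element definitions the engine resolves while type-checking the expression, which callers can use for dependency analysis. The worker context and the example path below are assumptions:

// Sketch only - assumes an IWorkerContext named worker is already set up; imports omitted,
// and the checked FHIR exceptions thrown by parse/check must be handled by the caller.
FHIRPathEngine fpe = new FHIRPathEngine(worker);
ExpressionNode expr = fpe.parse("Patient.name.given");       // example expression, not taken from the diff

Set<ElementDefinition> deps = new HashSet<>();
TypeDetails td = fpe.check(null, "Patient", "Patient", expr, deps);

// td describes the possible result types of the expression; deps now holds the
// ElementDefinitions the checker resolved against while walking the path.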
@ -659,13 +704,13 @@ public class FHIRPathEngine {
|
|||
}
|
||||
}
|
||||
|
||||
return executeType(new ExecutionTypeContext(appContext, sd.getUrl(), types, types), types, expr, true);
|
||||
return executeType(new ExecutionTypeContext(appContext, sd.getUrl(), types, types), types, expr, null, true);
|
||||
}
|
||||
|
||||
public TypeDetails check(Object appContext, StructureDefinition sd, ExpressionNode expr) throws FHIRLexerException, PathEngineException, DefinitionException {
|
||||
// if context is a path that refers to a type, do that conversion now
|
||||
TypeDetails types = null; // this is a special case; the first path reference will have to resolve to something in the context
|
||||
return executeType(new ExecutionTypeContext(appContext, sd == null ? null : sd.getUrl(), null, types), types, expr, true);
|
||||
return executeType(new ExecutionTypeContext(appContext, sd == null ? null : sd.getUrl(), null, types), types, expr, null, true);
|
||||
}
|
||||
|
||||
public TypeDetails check(Object appContext, String resourceType, String context, String expr) throws FHIRLexerException, PathEngineException, DefinitionException {
|
||||
|
@ -1557,7 +1602,7 @@ public class FHIRPathEngine {
|
|||
return new TypeDetails(CollectionStatus.SINGLETON, exp.getName());
|
||||
}
|
||||
|
||||
private TypeDetails executeType(ExecutionTypeContext context, TypeDetails focus, ExpressionNode exp, boolean atEntry) throws PathEngineException, DefinitionException {
|
||||
private TypeDetails executeType(ExecutionTypeContext context, TypeDetails focus, ExpressionNode exp, Set<ElementDefinition> elementDependencies, boolean atEntry) throws PathEngineException, DefinitionException {
|
||||
TypeDetails result = new TypeDetails(null);
|
||||
switch (exp.getKind()) {
|
||||
case Name:
|
||||
|
@ -1571,7 +1616,7 @@ public class FHIRPathEngine {
|
|||
result.update(executeContextType(context, exp.getName(), exp));
|
||||
} else {
|
||||
for (String s : focus.getTypes()) {
|
||||
result.update(executeType(s, exp, atEntry));
|
||||
result.update(executeType(s, exp, atEntry, focus, elementDependencies));
|
||||
}
|
||||
if (result.hasNoTypes()) {
|
||||
throw makeException(exp, I18nConstants.FHIRPATH_UNKNOWN_NAME, exp.getName(), focus.describe());
|
||||
|
@ -1579,7 +1624,7 @@ public class FHIRPathEngine {
|
|||
}
|
||||
break;
|
||||
case Function:
|
||||
result.update(evaluateFunctionType(context, focus, exp));
|
||||
result.update(evaluateFunctionType(context, focus, exp, elementDependencies));
|
||||
break;
|
||||
case Unary:
|
||||
result.addType(TypeDetails.FP_Integer);
|
||||
|
@ -1590,12 +1635,12 @@ public class FHIRPathEngine {
|
|||
result.update(resolveConstantType(context, exp.getConstant(), exp));
|
||||
break;
|
||||
case Group:
|
||||
result.update(executeType(context, focus, exp.getGroup(), atEntry));
|
||||
result.update(executeType(context, focus, exp.getGroup(), elementDependencies, atEntry));
|
||||
}
|
||||
exp.setTypes(result);
|
||||
|
||||
if (exp.getInner() != null) {
|
||||
result = executeType(context, result, exp.getInner(), false);
|
||||
result = executeType(context, result, exp.getInner(), elementDependencies, false);
|
||||
}
|
||||
|
||||
if (exp.isProximal() && exp.getOperation() != null) {
|
||||
|
@ -1606,7 +1651,7 @@ public class FHIRPathEngine {
|
|||
if (last.getOperation() == Operation.Is || last.getOperation() == Operation.As) {
|
||||
work = executeTypeName(context, focus, next, atEntry);
|
||||
} else {
|
||||
work = executeType(context, focus, next, atEntry);
|
||||
work = executeType(context, focus, next, elementDependencies, atEntry);
|
||||
}
|
||||
result = operateTypes(result, last.getOperation(), work, last);
|
||||
last = next;
|
||||
|
@ -1917,7 +1962,12 @@ public class FHIRPathEngine {
|
|||
case LessOrEqual: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case GreaterOrEqual: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case Is: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case As: return new TypeDetails(CollectionStatus.SINGLETON, right.getTypes());
|
||||
case As:
|
||||
TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, right.getTypes());
|
||||
if (td.typesHaveTargets()) {
|
||||
td.addTargets(left.getTargets());
|
||||
}
|
||||
return td;
|
||||
case Union: return left.union(right);
|
||||
case Or: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case And: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
|
@ -3102,12 +3152,12 @@ public class FHIRPathEngine {
|
|||
return hostServices.resolveConstantType(context.appInfo, name);
|
||||
}
|
||||
|
||||
private TypeDetails executeType(String type, ExpressionNode exp, boolean atEntry) throws PathEngineException, DefinitionException {
|
||||
private TypeDetails executeType(String type, ExpressionNode exp, boolean atEntry, TypeDetails focus, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
|
||||
if (atEntry && Character.isUpperCase(exp.getName().charAt(0)) && hashTail(type).equals(exp.getName())) { // special case for start up
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, type);
|
||||
}
|
||||
TypeDetails result = new TypeDetails(null);
|
||||
getChildTypesByName(type, exp.getName(), result, exp);
|
||||
getChildTypesByName(type, exp.getName(), result, exp, focus, elementDependencies);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -3118,7 +3168,7 @@ public class FHIRPathEngine {
|
|||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private TypeDetails evaluateFunctionType(ExecutionTypeContext context, TypeDetails focus, ExpressionNode exp) throws PathEngineException, DefinitionException {
|
||||
private TypeDetails evaluateFunctionType(ExecutionTypeContext context, TypeDetails focus, ExpressionNode exp, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
|
||||
List<TypeDetails> paramTypes = new ArrayList<TypeDetails>();
|
||||
if (exp.getFunction() == Function.Is || exp.getFunction() == Function.As || exp.getFunction() == Function.OfType) {
|
||||
paramTypes.add(new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
|
||||
|
@ -3126,9 +3176,9 @@ public class FHIRPathEngine {
|
|||
int i = 0;
|
||||
for (ExpressionNode expr : exp.getParameters()) {
|
||||
if (isExpressionParameter(exp, i)) {
|
||||
paramTypes.add(executeType(changeThis(context, focus), focus, expr, true));
|
||||
paramTypes.add(executeType(changeThis(context, focus), focus, expr, elementDependencies, true));
|
||||
} else {
|
||||
paramTypes.add(executeType(context, context.thisItem, expr, true));
|
||||
paramTypes.add(executeType(context, context.thisItem, expr, elementDependencies, true));
|
||||
}
|
||||
i++;
|
||||
}
|
||||
|
@ -3159,11 +3209,11 @@ public class FHIRPathEngine {
|
|||
case Where :
|
||||
return focus;
|
||||
case Select :
|
||||
return anything(focus.getCollectionStatus());
|
||||
return paramTypes.get(0);
|
||||
case All :
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case Repeat :
|
||||
return anything(focus.getCollectionStatus());
|
||||
return paramTypes.get(0); // this might be a little more complicated...
|
||||
case Aggregate :
|
||||
return anything(focus.getCollectionStatus());
|
||||
case Item : {
|
||||
|
@ -3173,11 +3223,19 @@ public class FHIRPathEngine {
|
|||
}
|
||||
case As : {
|
||||
checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
|
||||
TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
|
||||
if (td.typesHaveTargets()) {
|
||||
td.addTargets(focus.getTargets());
|
||||
}
|
||||
return td;
|
||||
}
|
||||
case OfType : {
|
||||
checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
|
||||
TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
|
||||
if (td.typesHaveTargets()) {
|
||||
td.addTargets(focus.getTargets());
|
||||
}
|
||||
return td;
|
||||
}
|
||||
case Type : {
|
||||
boolean s = false;
|
||||
|
@ -3326,12 +3384,20 @@ public class FHIRPathEngine {
|
|||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_DateTime);
|
||||
case Resolve : {
|
||||
checkContextReference(focus, "resolve", exp);
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, "DomainResource");
|
||||
return new TypeDetails(CollectionStatus.ORDERED, "DomainResource");
|
||||
}
|
||||
case Extension : {
|
||||
checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
|
||||
ExpressionNode p = exp.getParameters().get(0);
|
||||
if (p.getKind() == Kind.Constant && p.getConstant() != null) {
|
||||
String url = exp.getParameters().get(0).getConstant().primitiveValue();
|
||||
StructureDefinition sd = worker.fetchResource(StructureDefinition.class, url);
|
||||
if (sd != null) {
|
||||
return new TypeDetails(CollectionStatus.ORDERED, new ProfiledType(url));
|
||||
}
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, "Extension");
|
||||
}
|
||||
}
|
||||
case AnyTrue:
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
|
||||
case AllTrue:
|
||||
|
@ -3572,7 +3638,7 @@ public class FHIRPathEngine {
|
|||
private TypeDetails childTypes(TypeDetails focus, String mask, ExpressionNode expr) throws PathEngineException, DefinitionException {
|
||||
TypeDetails result = new TypeDetails(CollectionStatus.UNORDERED);
|
||||
for (String f : focus.getTypes()) {
|
||||
getChildTypesByName(f, mask, result, expr);
|
||||
getChildTypesByName(f, mask, result, expr, null, null);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -4011,6 +4077,7 @@ public class FHIRPathEngine {
|
|||
}
|
||||
|
||||
private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
|
||||
private ContextUtilities cu;
|
||||
public static String bytesToHex(byte[] bytes) {
|
||||
char[] hexChars = new char[bytes.length * 2];
|
||||
for (int j = 0; j < bytes.length; j++) {
|
||||
|
@ -5693,7 +5760,7 @@ public class FHIRPathEngine {
|
|||
|
||||
}
|
||||
|
||||
private void getChildTypesByName(String type, String name, TypeDetails result, ExpressionNode expr) throws PathEngineException, DefinitionException {
|
||||
private void getChildTypesByName(String type, String name, TypeDetails result, ExpressionNode expr, TypeDetails focus, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
|
||||
if (Utilities.noString(type)) {
|
||||
throw makeException(expr, I18nConstants.FHIRPATH_NO_TYPE, "", "getChildTypesByName");
|
||||
}
|
||||
|
@ -5737,11 +5804,11 @@ public class FHIRPathEngine {
|
|||
if (dt == null) {
|
||||
throw makeException(expr, I18nConstants.FHIRPATH_NO_TYPE, ProfileUtilities.sdNs(t.getCode(), null), "getChildTypesByName");
|
||||
}
|
||||
addTypeAndDescendents(sdl, dt, new ContextUtilities(worker).allStructures());
|
||||
addTypeAndDescendents(sdl, dt, cu.allStructures());
|
||||
// also add any descendant types
|
||||
}
|
||||
} else {
|
||||
addTypeAndDescendents(sdl, sd, new ContextUtilities(worker).allStructures());
|
||||
addTypeAndDescendents(sdl, sd, cu.allStructures());
|
||||
if (type.contains("#")) {
|
||||
tail = type.substring(type.indexOf("#")+1);
|
||||
tail = tail.substring(tail.indexOf("."));
|
||||
|
@ -5753,7 +5820,17 @@ public class FHIRPathEngine {
|
|||
if (name.equals("**")) {
|
||||
assert(result.getCollectionStatus() == CollectionStatus.UNORDERED);
|
||||
for (ElementDefinition ed : sdi.getSnapshot().getElement()) {
|
||||
if (ed.getPath().startsWith(path))
|
||||
if (ed.getPath().startsWith(path)) {
|
||||
if (ed.hasContentReference()) {
|
||||
String cpath = ed.getContentReference();
|
||||
String tn = sdi.getType()+cpath;
|
||||
if (!result.hasType(worker, tn)) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
getChildTypesByName(result.addType(tn), "**", result, expr, null, elementDependencies);
|
||||
}
|
||||
} else {
|
||||
for (TypeRefComponent t : ed.getType()) {
|
||||
if (t.hasCode() && t.getCodeElement().hasValue()) {
|
||||
String tn = null;
|
||||
|
@ -5765,11 +5842,19 @@ public class FHIRPathEngine {
|
|||
if (t.getCode().equals("Resource")) {
|
||||
for (String rn : worker.getResourceNames()) {
|
||||
if (!result.hasType(worker, rn)) {
|
||||
getChildTypesByName(result.addType(rn), "**", result, expr);
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
getChildTypesByName(result.addType(rn), "**", result, expr, null, elementDependencies);
|
||||
}
|
||||
}
|
||||
} else if (!result.hasType(worker, tn)) {
|
||||
getChildTypesByName(result.addType(tn), "**", result, expr);
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
getChildTypesByName(result.addType(tn), "**", result, expr, null, elementDependencies);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5780,13 +5865,26 @@ public class FHIRPathEngine {
|
|||
if (ed.getPath().startsWith(path) && !ed.getPath().substring(path.length()).contains("."))
|
||||
for (TypeRefComponent t : ed.getType()) {
|
||||
if (Utilities.noString(t.getCode())) { // Element.id or Extension.url
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
result.addType("System.string");
|
||||
} else if (t.getCode().equals("Element") || t.getCode().equals("BackboneElement")) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
result.addType(sdi.getType()+"#"+ed.getPath());
|
||||
} else if (t.getCode().equals("Resource")) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
result.addTypes(worker.getResourceNames());
|
||||
} else {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed);
|
||||
}
|
||||
result.addType(t.getCode());
|
||||
copyTargetProfiles(ed, t, focus, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5795,12 +5893,18 @@ public class FHIRPathEngine {
|
|||
|
||||
ElementDefinitionMatch ed = getElementDefinition(sdi, path, isAllowPolymorphicNames(), expr);
|
||||
if (ed != null) {
|
||||
if (!Utilities.noString(ed.getFixedType()))
|
||||
if (!Utilities.noString(ed.getFixedType())) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed.definition);
|
||||
}
|
||||
result.addType(ed.getFixedType());
|
||||
else {
|
||||
} else {
|
||||
for (TypeRefComponent t : ed.getDefinition().getType()) {
|
||||
if (Utilities.noString(t.getCode())) {
|
||||
if (Utilities.existsInList(ed.getDefinition().getId(), "Element.id", "Extension.url") || Utilities.existsInList(ed.getDefinition().getBase().getPath(), "Resource.id", "Element.id", "Extension.url")) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed.definition);
|
||||
}
|
||||
result.addType(TypeDetails.FP_NS, "string");
|
||||
}
|
||||
break; // throw new PathEngineException("Illegal reference to primitive value attribute @ "+path);
|
||||
|
@ -5810,6 +5914,9 @@ public class FHIRPathEngine {
|
|||
if (t.getCode().equals("Element") || t.getCode().equals("BackboneElement")) {
|
||||
pt = new ProfiledType(sdi.getUrl()+"#"+path);
|
||||
} else if (t.getCode().equals("Resource")) {
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed.definition);
|
||||
}
|
||||
result.addTypes(worker.getResourceNames());
|
||||
} else {
|
||||
pt = new ProfiledType(t.getCode());
|
||||
|
@ -5821,7 +5928,11 @@ public class FHIRPathEngine {
|
|||
if (ed.getDefinition().hasBinding()) {
|
||||
pt.addBinding(ed.getDefinition().getBinding());
|
||||
}
|
||||
result.addType(pt);
|
||||
if (elementDependencies != null) {
|
||||
elementDependencies.add(ed.definition);
|
||||
}
|
||||
result.addType(ed.definition.unbounded() ? CollectionStatus.ORDERED : CollectionStatus.SINGLETON, pt);
|
||||
copyTargetProfiles(ed.getDefinition(), t, focus, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5831,6 +5942,18 @@ public class FHIRPathEngine {
|
|||
}
|
||||
}
|
||||
|
||||
private void copyTargetProfiles(ElementDefinition ed, TypeRefComponent t, TypeDetails focus, TypeDetails result) {
|
||||
if (t.hasTargetProfile()) {
|
||||
for (CanonicalType u : t.getTargetProfile()) {
|
||||
result.addTarget(u.primitiveValue());
|
||||
}
|
||||
} else if (focus != null && focus.hasType("CodeableReference") && ed.getPath().endsWith(".reference") && focus.getTargets() != null) { // special case, targets are on parent
|
||||
for (String s : focus.getTargets()) {
|
||||
result.addTarget(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void addTypeAndDescendents(List<StructureDefinition> sdl, StructureDefinition dt, List<StructureDefinition> types) {
|
||||
sdl.add(dt);
|
||||
for (StructureDefinition sd : types) {
|
||||
|
@ -5860,7 +5983,7 @@ public class FHIRPathEngine {
|
|||
}
|
||||
|
||||
|
||||
private ElementDefinitionMatch getElementDefinition(StructureDefinition sd, String path, boolean allowTypedName, ExpressionNode expr) throws PathEngineException {
|
||||
public ElementDefinitionMatch getElementDefinition(StructureDefinition sd, String path, boolean allowTypedName, ExpressionNode expr) throws PathEngineException {
|
||||
for (ElementDefinition ed : sd.getSnapshot().getElement()) {
|
||||
if (ed.getPath().equals(path)) {
|
||||
if (ed.hasContentReference()) {
|
||||
|
@ -6229,5 +6352,8 @@ public class FHIRPathEngine {
|
|||
this.liquidMode = liquidMode;
|
||||
}
|
||||
|
||||
public ProfileUtilities getProfileUtilities() {
|
||||
return profileUtilities;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -103,7 +103,8 @@ public class StructureMapUtilities {
|
|||
public static final String MAP_SEARCH_EXPRESSION = "map.search.expression";
|
||||
public static final String MAP_EXPRESSION = "map.transform.expression";
|
||||
private static final boolean RENDER_MULTIPLE_TARGETS_ONELINE = true;
|
||||
private static final String AUTO_VAR_NAME = "vvv";
|
||||
public static final String AUTO_VAR_NAME = "vvv";
|
||||
public static final String DEF_GROUP_NAME = "DefaultMappingGroupAnonymousAlias";
|
||||
|
||||
private final IWorkerContext worker;
|
||||
private final FHIRPathEngine fpe;
|
||||
|
@ -619,15 +620,38 @@ public class StructureMapUtilities {
|
|||
}
|
||||
|
||||
public StructureMap parse(String text, String srcName) throws FHIRException {
|
||||
FHIRLexer lexer = new FHIRLexer(text, srcName);
|
||||
FHIRLexer lexer = new FHIRLexer(Utilities.stripBOM(text), srcName, true);
|
||||
if (lexer.done())
|
||||
throw lexer.error("Map Input cannot be empty");
|
||||
lexer.token("map");
|
||||
StructureMap result = new StructureMap();
|
||||
if (lexer.hasToken("map")) {
|
||||
lexer.token("map");
|
||||
result.setUrl(lexer.readConstant("url"));
|
||||
lexer.token("=");
|
||||
result.setName(lexer.readConstant("name"));
|
||||
result.setDescription(lexer.getAllComments());
|
||||
result.setStatus(PublicationStatus.DRAFT);
|
||||
} else {
|
||||
while (lexer.hasToken("///")) {
|
||||
lexer.next();
|
||||
String fid = lexer.takeDottedToken();
|
||||
lexer.token("=");
|
||||
switch (fid) {
|
||||
case "url" :
|
||||
result.setUrl(lexer.readConstant("url"));
|
||||
break;
|
||||
case "name" :
|
||||
result.setName(lexer.readConstant("name"));
|
||||
break;
|
||||
case "title" :
|
||||
result.setTitle(lexer.readConstant("title"));
|
||||
break;
|
||||
default:
|
||||
lexer.readConstant("nothing");
|
||||
// nothing
|
||||
}
|
||||
}
|
||||
}
|
||||
while (lexer.hasToken("conceptmap"))
|
||||
parseConceptMap(result, lexer);
|
||||
|
||||
|
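As a reader's aid (not part of the commit): parse() now accepts either the classic map "url" = "name" header or the new metadata form, where /// key = "value" lines supply url, name and title before the body. A hedged sketch with an invented map; context is assumed to be an available IWorkerContext:

// Sketch only - the map text is invented for illustration; imports omitted,
// and the checked FHIRException from parse() must be handled by the caller.
String mapText =
    "/// url = \"http://example.org/StructureMap/demo\"\n" +
    "/// name = \"Demo\"\n" +
    "/// title = \"Demo map\"\n" +
    "uses \"http://hl7.org/fhir/StructureDefinition/Patient\" as source\n" +
    "uses \"http://hl7.org/fhir/StructureDefinition/Patient\" as target\n" +
    "group demo(source src : Patient, target tgt : Patient) {\n" +
    "  src.id -> tgt.id;\n" +
    "}";

StructureMapUtilities scu = new StructureMapUtilities(context);
StructureMap map = scu.parse(mapText, "demo.map");            // url, name and title come from the /// lines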
@ -643,36 +667,6 @@ public class StructureMapUtilities {
|
|||
return result;
|
||||
}
|
||||
|
||||
public Element parseEM(String text, String srcName, List<ValidationMessage> list) throws FHIRException {
|
||||
FHIRLexer lexer = new FHIRLexer(text, srcName);
|
||||
if (lexer.done())
|
||||
throw lexer.error("Map Input cannot be empty");
|
||||
lexer.token("map");
|
||||
Element result = Manager.build(worker, worker.fetchTypeDefinition("StructureMap"));
|
||||
try {
|
||||
result.makeElement("url").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
|
||||
lexer.token("=");
|
||||
result.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("name"));
|
||||
result.makeElement("description").markLocation(lexer.getCurrentLocation()).setValue(lexer.getAllComments());
|
||||
while (lexer.hasToken("conceptmap"))
|
||||
parseConceptMapEM(result, lexer);
|
||||
|
||||
while (lexer.hasToken("uses"))
|
||||
parseUsesEM(result, lexer);
|
||||
while (lexer.hasToken("imports"))
|
||||
parseImportsEM(result, lexer);
|
||||
|
||||
while (!lexer.done()) {
|
||||
parseGroupEM(result, lexer);
|
||||
}
|
||||
} catch (FHIRLexerException e) {
|
||||
list.add(new ValidationMessage(Source.InstanceValidator, IssueType.INVALID, e.getLocation().getLine(), e.getLocation().getColumn(), null, e.getMessage(), IssueSeverity.FATAL));
|
||||
} catch (Exception e) {
|
||||
list.add(new ValidationMessage(Source.InstanceValidator, IssueType.INVALID, null, e.getMessage(), IssueSeverity.FATAL));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private void parseConceptMap(StructureMap result, FHIRLexer lexer) throws FHIRLexerException {
|
||||
lexer.token("conceptmap");
|
||||
|
@ -730,63 +724,7 @@ public class StructureMapUtilities {
|
|||
lexer.token("}");
|
||||
}
|
||||
|
||||
private void parseConceptMapEM(Element result, FHIRLexer lexer) throws FHIRLexerException {
|
||||
lexer.token("conceptmap");
|
||||
Element map = Manager.build(worker, worker.fetchTypeDefinition("ConceptMap"));
|
||||
Element eid = map.makeElement("id").markLocation(lexer.getCurrentLocation());
|
||||
String id = lexer.readConstant("map id");
|
||||
if (id.startsWith("#"))
|
||||
throw lexer.error("Concept Map identifier must start with #");
|
||||
eid.setValue(id);
|
||||
map.makeElement("status").setValue(PublicationStatus.DRAFT.toCode()); // todo: how to add this to the text format
|
||||
result.makeElement("contained").setElement("resource", map);
|
||||
lexer.token("{");
|
||||
// lexer.token("source");
|
||||
// map.setSource(new UriType(lexer.readConstant("source")));
|
||||
// lexer.token("target");
|
||||
// map.setSource(new UriType(lexer.readConstant("target")));
|
||||
Map<String, String> prefixes = new HashMap<String, String>();
|
||||
while (lexer.hasToken("prefix")) {
|
||||
lexer.token("prefix");
|
||||
String n = lexer.take();
|
||||
lexer.token("=");
|
||||
String v = lexer.readConstant("prefix url");
|
||||
prefixes.put(n, v);
|
||||
}
|
||||
while (lexer.hasToken("unmapped")) {
|
||||
lexer.token("unmapped");
|
||||
lexer.token("for");
|
||||
String n = readPrefix(prefixes, lexer);
|
||||
Element g = getGroupE(map, n, null);
|
||||
lexer.token("=");
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
String v = lexer.take();
|
||||
if (v.equals("provided")) {
|
||||
g.makeElement("unmapped").makeElement("mode").markLocation(loc).setValue(ConceptMapGroupUnmappedMode.USESOURCECODE.toCode());
|
||||
} else
|
||||
throw lexer.error("Only unmapped mode PROVIDED is supported at this time");
|
||||
}
|
||||
while (!lexer.hasToken("}")) {
|
||||
String srcs = readPrefix(prefixes, lexer);
|
||||
lexer.token(":");
|
||||
SourceLocation scloc = lexer.getCurrentLocation();
|
||||
String sc = lexer.getCurrent().startsWith("\"") ? lexer.readConstant("code") : lexer.take();
|
||||
SourceLocation relLoc = lexer.getCurrentLocation();
|
||||
ConceptMapRelationship rel = readRelationship(lexer);
|
||||
String tgts = readPrefix(prefixes, lexer);
|
||||
Element g = getGroupE(map, srcs, tgts);
|
||||
Element e = g.addElement("element");
|
||||
e.makeElement("code").markLocation(scloc).setValue(sc.startsWith("\"") ? lexer.processConstant(sc) : sc);
|
||||
Element tgt = e.addElement("target");
|
||||
tgt.makeElement("relationship").markLocation(relLoc).setValue(rel.toCode());
|
||||
lexer.token(":");
|
||||
tgt.makeElement("code").markLocation(lexer.getCurrentLocation()).setValue(lexer.getCurrent().startsWith("\"") ? lexer.readConstant("code") : lexer.take());
|
||||
if (lexer.hasComments()) {
|
||||
tgt.makeElement("comment").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
|
||||
}
|
||||
}
|
||||
lexer.token("}");
|
||||
}
|
||||
|
||||
|
||||
private ConceptMapGroupComponent getGroup(ConceptMap map, String srcs, String tgts) {
|
||||
for (ConceptMapGroupComponent grp : map.getGroup()) {
|
||||
|
@ -803,22 +741,6 @@ public class StructureMapUtilities {
|
|||
return grp;
|
||||
}
|
||||
|
||||
private Element getGroupE(Element map, String srcs, String tgts) {
|
||||
for (Element grp : map.getChildrenByName("group")) {
|
||||
if (grp.getChildValue("source").equals(srcs)) {
|
||||
Element tgt = grp.getNamedChild("target");
|
||||
if (tgt == null || tgts == null || tgts.equals(tgt.getValue())) {
|
||||
if (tgt == null && tgts != null)
|
||||
grp.makeElement("target").setValue(tgts);
|
||||
return grp;
|
||||
}
|
||||
}
|
||||
}
|
||||
Element grp = map.addElement("group");
|
||||
grp.makeElement("source").setValue(srcs);
|
||||
grp.makeElement("target").setValue(tgts);
|
||||
return grp;
|
||||
}
|
||||
|
||||
|
||||
private String readPrefix(Map<String, String> prefixes, FHIRLexer lexer) throws FHIRLexerException {
|
||||
|
@ -859,21 +781,7 @@ public class StructureMapUtilities {
|
|||
st.setDocumentation(lexer.getFirstComment());
|
||||
}
|
||||
|
||||
private void parseUsesEM(Element result, FHIRLexer lexer) throws FHIRException {
|
||||
lexer.token("uses");
|
||||
Element st = result.addElement("structure");
|
||||
st.makeElement("url").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
|
||||
if (lexer.hasToken("alias")) {
|
||||
lexer.token("alias");
|
||||
st.makeElement("alias").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
lexer.token("as");
|
||||
st.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
lexer.skipToken(";");
|
||||
if (lexer.hasComments()) {
|
||||
st.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void parseImports(StructureMap result, FHIRLexer lexer) throws FHIRException {
|
||||
lexer.token("imports");
|
||||
|
@ -881,12 +789,6 @@ public class StructureMapUtilities {
|
|||
lexer.skipToken(";");
|
||||
}
|
||||
|
||||
private void parseImportsEM(Element result, FHIRLexer lexer) throws FHIRException {
|
||||
lexer.token("imports");
|
||||
result.addElement("import").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("url"));
|
||||
lexer.skipToken(";");
|
||||
}
|
||||
|
||||
private void parseGroup(StructureMap result, FHIRLexer lexer) throws FHIRException {
|
||||
String comment = lexer.getAllComments();
|
||||
lexer.token("group");
|
||||
|
@ -959,78 +861,7 @@ public class StructureMapUtilities {
|
|||
lexer.next();
|
||||
}
|
||||
|
||||
private void parseGroupEM(Element result, FHIRLexer lexer) throws FHIRException {
|
||||
SourceLocation commLoc = lexer.getCommentLocation();
|
||||
String comment = lexer.getAllComments();
|
||||
lexer.token("group");
|
||||
Element group = result.addElement("group");
|
||||
if (comment != null) {
|
||||
group.makeElement("documentation").markLocation(commLoc).setValue(comment);
|
||||
}
|
||||
boolean newFmt = false;
|
||||
if (lexer.hasToken("for")) {
|
||||
lexer.token("for");
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
if ("type".equals(lexer.getCurrent())) {
|
||||
lexer.token("type");
|
||||
lexer.token("+");
|
||||
lexer.token("types");
|
||||
group.makeElement("typeMode").markLocation(loc).setValue(StructureMapGroupTypeMode.TYPEANDTYPES.toCode());
|
||||
} else {
|
||||
lexer.token("types");
|
||||
group.makeElement("typeMode").markLocation(loc).setValue(StructureMapGroupTypeMode.TYPES.toCode());
|
||||
}
|
||||
}
|
||||
group.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
if (lexer.hasToken("(")) {
|
||||
newFmt = true;
|
||||
lexer.take();
|
||||
while (!lexer.hasToken(")")) {
|
||||
parseInputEM(group, lexer, true);
|
||||
if (lexer.hasToken(","))
|
||||
lexer.token(",");
|
||||
}
|
||||
lexer.take();
|
||||
}
|
||||
if (lexer.hasToken("extends")) {
|
||||
lexer.next();
|
||||
group.makeElement("extends").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
if (newFmt) {
|
||||
if (lexer.hasToken("<")) {
|
||||
lexer.token("<");
|
||||
lexer.token("<");
|
||||
if (lexer.hasToken("types")) {
|
||||
group.makeElement("typeMode").markLocation(lexer.getCurrentLocation()).setValue(StructureMapGroupTypeMode.TYPES.toCode());
|
||||
} else {
|
||||
group.makeElement("typeMode").markLocation(lexer.getCurrentLocation()).setValue(StructureMapGroupTypeMode.TYPEANDTYPES.toCode());
|
||||
lexer.token("type");
|
||||
lexer.token("+");
|
||||
}
|
||||
lexer.token(">");
|
||||
lexer.token(">");
|
||||
}
|
||||
lexer.token("{");
|
||||
}
|
||||
if (newFmt) {
|
||||
while (!lexer.hasToken("}")) {
|
||||
if (lexer.done())
|
||||
throw lexer.error("premature termination expecting 'endgroup'");
|
||||
parseRuleEM(result, group, lexer, true);
|
||||
}
|
||||
} else {
|
||||
while (lexer.hasToken("input"))
|
||||
parseInputEM(group, lexer, false);
|
||||
while (!lexer.hasToken("endgroup")) {
|
||||
if (lexer.done())
|
||||
throw lexer.error("premature termination expecting 'endgroup'");
|
||||
parseRuleEM(result, group, lexer, false);
|
||||
}
|
||||
}
|
||||
lexer.next();
|
||||
if (newFmt && lexer.hasToken(";"))
|
||||
lexer.next();
|
||||
}
|
||||
|
||||
|
||||
private void parseInput(StructureMapGroupComponent group, FHIRLexer lexer, boolean newFmt) throws FHIRException {
|
||||
StructureMapGroupInputComponent input = group.addInput();
|
||||
|
@ -1051,26 +882,7 @@ public class StructureMapUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void parseInputEM(Element group, FHIRLexer lexer, boolean newFmt) throws FHIRException {
|
||||
Element input = group.addElement("input");
|
||||
if (newFmt) {
|
||||
input.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
} else
|
||||
lexer.token("input");
|
||||
input.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
if (lexer.hasToken(":")) {
|
||||
lexer.token(":");
|
||||
input.makeElement("type").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
if (!newFmt) {
|
||||
lexer.token("as");
|
||||
input.makeElement("mode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
if (lexer.hasComments()) {
|
||||
input.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
|
||||
}
|
||||
lexer.skipToken(";");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void parseRule(StructureMap map, List<StructureMapGroupRuleComponent> list, FHIRLexer lexer, boolean newFmt) throws FHIRException {
|
||||
StructureMapGroupRuleComponent rule = new StructureMapGroupRuleComponent();
|
||||
|
@ -1147,84 +959,6 @@ public class StructureMapUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void parseRuleEM(Element map, Element context, FHIRLexer lexer, boolean newFmt) throws FHIRException {
|
||||
Element rule = context.addElement("rule");
|
||||
if (!newFmt) {
|
||||
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.takeDottedToken());
|
||||
lexer.token(":");
|
||||
lexer.token("for");
|
||||
} else {
|
||||
if (lexer.hasComments()) {
|
||||
rule.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
|
||||
}
|
||||
}
|
||||
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
parseSourceEM(rule, lexer);
|
||||
done = !lexer.hasToken(",");
|
||||
if (!done)
|
||||
lexer.next();
|
||||
}
|
||||
if ((newFmt && lexer.hasToken("->")) || (!newFmt && lexer.hasToken("make"))) {
|
||||
lexer.token(newFmt ? "->" : "make");
|
||||
done = false;
|
||||
while (!done) {
|
||||
parseTargetEM(rule, lexer);
|
||||
done = !lexer.hasToken(",");
|
||||
if (!done)
|
||||
lexer.next();
|
||||
}
|
||||
}
|
||||
if (lexer.hasToken("then")) {
|
||||
lexer.token("then");
|
||||
if (lexer.hasToken("{")) {
|
||||
lexer.token("{");
|
||||
while (!lexer.hasToken("}")) {
|
||||
if (lexer.done())
|
||||
throw lexer.error("premature termination expecting '}' in nested group");
|
||||
parseRuleEM(map, rule, lexer, newFmt);
|
||||
}
|
||||
lexer.token("}");
|
||||
} else {
|
||||
done = false;
|
||||
while (!done) {
|
||||
parseRuleReferenceEM(rule, lexer);
|
||||
done = !lexer.hasToken(",");
|
||||
if (!done)
|
||||
lexer.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!rule.hasChild("documentation") && lexer.hasComments()) {
|
||||
rule.makeElement("documentation").markLocation(lexer.getCommentLocation()).setValue(lexer.getFirstComment());
|
||||
}
|
||||
|
||||
if (isSimpleSyntaxEM(rule)) {
|
||||
rule.forceElement("source").makeElement("variable").setValue(AUTO_VAR_NAME);
|
||||
rule.forceElement("target").makeElement("variable").setValue(AUTO_VAR_NAME);
|
||||
rule.forceElement("target").makeElement("transform").setValue(StructureMapTransform.CREATE.toCode());
|
||||
// no dependencies - imply what is to be done based on types
|
||||
}
|
||||
if (newFmt) {
|
||||
if (lexer.isConstant()) {
|
||||
if (lexer.isStringConstant()) {
|
||||
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("ruleName"));
|
||||
} else {
|
||||
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
} else {
|
||||
if (rule.getChildrenByName("source").size() != 1 || !rule.getChildrenByName("source").get(0).hasChild("element"))
|
||||
throw lexer.error("Complex rules must have an explicit name");
|
||||
if (rule.getChildrenByName("source").get(0).hasChild("type"))
|
||||
rule.makeElement("name").setValue(rule.getChildrenByName("source").get(0).getNamedChildValue("element") + "-" + rule.getChildrenByName("source").get(0).getNamedChildValue("type"));
|
||||
else
|
||||
rule.makeElement("name").setValue(rule.getChildrenByName("source").get(0).getNamedChildValue("element"));
|
||||
}
|
||||
lexer.token(";");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isSimpleSyntax(StructureMapGroupRuleComponent rule) {
|
||||
return
|
||||
(rule.getSource().size() == 1 && rule.getSourceFirstRep().hasContext() && rule.getSourceFirstRep().hasElement() && !rule.getSourceFirstRep().hasVariable()) &&
|
||||
|
@ -1233,13 +967,6 @@ public class StructureMapUtilities {
|
|||
}
|
||||
|
||||
|
||||
private boolean isSimpleSyntaxEM(Element rule) {
|
||||
return
|
||||
(rule.getChildren("source").size() == 1 && rule.getChildren("source").get(0).hasChild("context") && rule.getChildren("source").get(0).hasChild("element") && !rule.getChildren("source").get(0).hasChild("variable")) &&
|
||||
(rule.getChildren("target").size() == 1 && rule.getChildren("target").get(0).hasChild("context") && rule.getChildren("target").get(0).hasChild("element") && !rule.getChildren("target").get(0).hasChild("variable") &&
|
||||
!rule.getChildren("target").get(0).hasChild("parameter")) &&
|
||||
(rule.getChildren("dependent").size() == 0 && rule.getChildren("rule").size() == 0);
|
||||
}
|
||||
|
||||
private void parseRuleReference(StructureMapGroupRuleComponent rule, FHIRLexer lexer) throws FHIRLexerException {
|
||||
StructureMapGroupRuleDependentComponent ref = rule.addDependent();
|
||||
|
@ -1255,19 +982,6 @@ public class StructureMapUtilities {
|
|||
lexer.token(")");
|
||||
}
|
||||
|
||||
private void parseRuleReferenceEM(Element rule, FHIRLexer lexer) throws FHIRLexerException {
|
||||
Element ref = rule.addElement("dependent");
|
||||
rule.makeElement("name").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
lexer.token("(");
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
parseParameterEM(ref, lexer);
|
||||
done = !lexer.hasToken(",");
|
||||
if (!done)
|
||||
lexer.next();
|
||||
}
|
||||
lexer.token(")");
|
||||
}
|
||||
|
||||
private void parseSource(StructureMapGroupRuleComponent rule, FHIRLexer lexer) throws FHIRException {
|
||||
StructureMapGroupRuleSourceComponent source = rule.addSource();
|
||||
|
@ -1287,12 +1001,12 @@ public class StructureMapUtilities {
|
|||
// type and cardinality
|
||||
lexer.token(":");
|
||||
source.setType(lexer.takeDottedToken());
|
||||
if (!lexer.hasToken("as", "first", "last", "not_first", "not_last", "only_one", "default")) {
|
||||
}
|
||||
if (Utilities.isInteger(lexer.getCurrent())) {
|
||||
source.setMin(lexer.takeInt());
|
||||
lexer.token("..");
|
||||
source.setMax(lexer.take());
|
||||
}
|
||||
}
|
||||
if (lexer.hasToken("default")) {
|
||||
lexer.token("default");
|
||||
source.setDefaultValue(lexer.readConstant("default value"));
|
||||
|
@ -1324,65 +1038,6 @@ public class StructureMapUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void parseSourceEM(Element rule, FHIRLexer lexer) throws FHIRException {
|
||||
Element source = rule.addElement("source");
|
||||
source.makeElement("context").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
if (source.getChildValue("context").equals("search") && lexer.hasToken("(")) {
|
||||
source.makeElement("context").markLocation(lexer.getCurrentLocation()).setValue("@search");
|
||||
lexer.take();
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
source.setUserData(MAP_SEARCH_EXPRESSION, node);
|
||||
source.makeElement("element").markLocation(loc).setValue(node.toString());
|
||||
lexer.token(")");
|
||||
} else if (lexer.hasToken(".")) {
|
||||
lexer.token(".");
|
||||
source.makeElement("element").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
if (lexer.hasToken(":")) {
|
||||
// type and cardinality
|
||||
lexer.token(":");
|
||||
source.setType(lexer.takeDottedToken());
|
||||
if (!lexer.hasToken("as", "first", "last", "not_first", "not_last", "only_one", "default")) {
|
||||
source.makeElement("min").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
lexer.token("..");
|
||||
source.makeElement("max").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
}
|
||||
if (lexer.hasToken("default")) {
|
||||
lexer.token("default");
|
||||
source.makeElement("defaultValue").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("default value"));
|
||||
}
|
||||
if (Utilities.existsInList(lexer.getCurrent(), "first", "last", "not_first", "not_last", "only_one")) {
|
||||
source.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
|
||||
if (lexer.hasToken("as")) {
|
||||
lexer.take();
|
||||
source.makeElement("variable").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
if (lexer.hasToken("where")) {
|
||||
lexer.take();
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
source.setUserData(MAP_WHERE_EXPRESSION, node);
|
||||
source.makeElement("condition").markLocation(loc).setValue(node.toString());
|
||||
}
|
||||
if (lexer.hasToken("check")) {
|
||||
lexer.take();
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
source.setUserData(MAP_WHERE_CHECK, node);
|
||||
source.makeElement("check").markLocation(loc).setValue(node.toString());
|
||||
}
|
||||
if (lexer.hasToken("log")) {
|
||||
lexer.take();
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
source.setUserData(MAP_WHERE_CHECK, node);
|
||||
source.makeElement("logMessage").markLocation(loc).setValue(lexer.take());
|
||||
}
|
||||
}
|
||||
|
||||
private void parseTarget(StructureMapGroupRuleComponent rule, FHIRLexer lexer) throws FHIRException {
|
||||
StructureMapGroupRuleTargetComponent target = rule.addTarget();
|
||||
|
@ -1458,84 +1113,6 @@ public class StructureMapUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void parseTargetEM(Element rule, FHIRLexer lexer) throws FHIRException {
|
||||
Element target = rule.addElement("target");
|
||||
SourceLocation loc = lexer.getCurrentLocation();
|
||||
String start = lexer.take();
|
||||
if (lexer.hasToken(".")) {
|
||||
target.makeElement("context").markLocation(loc).setValue(start);
|
||||
start = null;
|
||||
lexer.token(".");
|
||||
target.makeElement("element").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
String name;
|
||||
boolean isConstant = false;
|
||||
if (lexer.hasToken("=")) {
|
||||
if (start != null) {
|
||||
target.makeElement("context").markLocation(loc).setValue(start);
|
||||
}
|
||||
lexer.token("=");
|
||||
isConstant = lexer.isConstant();
|
||||
loc = lexer.getCurrentLocation();
|
||||
name = lexer.take();
|
||||
} else {
|
||||
loc = lexer.getCurrentLocation();
|
||||
name = start;
|
||||
}
|
||||
|
||||
if ("(".equals(name)) {
|
||||
// inline fluentpath expression
|
||||
target.makeElement("transform").markLocation(lexer.getCurrentLocation()).setValue(StructureMapTransform.EVALUATE.toCode());
|
||||
loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
target.setUserData(MAP_EXPRESSION, node);
|
||||
target.addElement("parameter").markLocation(loc).setValue(node.toString());
|
||||
lexer.token(")");
|
||||
} else if (lexer.hasToken("(")) {
|
||||
target.makeElement("transform").markLocation(loc).setValue(name);
|
||||
lexer.token("(");
|
||||
if (target.getChildValue("transform").equals(StructureMapTransform.EVALUATE.toCode())) {
|
||||
parseParameterEM(target, lexer);
|
||||
lexer.token(",");
|
||||
loc = lexer.getCurrentLocation();
|
||||
ExpressionNode node = fpe.parse(lexer);
|
||||
target.setUserData(MAP_EXPRESSION, node);
|
||||
target.addElement("parameter").markLocation(loc).setValue(node.toString());
|
||||
} else {
|
||||
while (!lexer.hasToken(")")) {
|
||||
parseParameterEM(target, lexer);
|
||||
if (!lexer.hasToken(")"))
|
||||
lexer.token(",");
|
||||
}
|
||||
}
|
||||
lexer.token(")");
|
||||
} else if (name != null) {
|
||||
target.makeElement("transform").markLocation(loc).setValue(StructureMapTransform.COPY.toCode());
|
||||
if (!isConstant) {
|
||||
loc = lexer.getCurrentLocation();
|
||||
String id = name;
|
||||
while (lexer.hasToken(".")) {
|
||||
id = id + lexer.take() + lexer.take();
|
||||
}
|
||||
target.addElement("parameter").markLocation(loc).setValue(id);
|
||||
} else {
|
||||
target.addElement("parameter").markLocation(lexer.getCurrentLocation()).setValue(readConstantEM(name, lexer));
|
||||
}
|
||||
}
|
||||
if (lexer.hasToken("as")) {
|
||||
lexer.take();
|
||||
target.makeElement("variable").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
while (Utilities.existsInList(lexer.getCurrent(), "first", "last", "share", "collate")) {
|
||||
if (lexer.getCurrent().equals("share")) {
|
||||
target.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
target.makeElement("listRuleId").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
} else {
|
||||
target.makeElement("listMode").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void parseParameter(StructureMapGroupRuleDependentComponent ref, FHIRLexer lexer) throws FHIRLexerException, FHIRFormatError {
|
||||
if (!lexer.isConstant()) {
|
||||
|
@ -1557,15 +1134,6 @@ public class StructureMapUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void parseParameterEM(Element ref, FHIRLexer lexer) throws FHIRLexerException, FHIRFormatError {
|
||||
if (!lexer.isConstant()) {
|
||||
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).setValue(lexer.take());
|
||||
} else if (lexer.isStringConstant())
|
||||
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).setValue(lexer.readConstant("??"));
|
||||
else {
|
||||
ref.addElement("parameter").markLocation(lexer.getCurrentLocation()).setValue(readConstantEM(lexer.take(), lexer));
|
||||
}
|
||||
}
|
||||
|
||||
private DataType readConstant(String s, FHIRLexer lexer) throws FHIRLexerException {
|
||||
if (Utilities.isInteger(s))
|
||||
|
@ -1578,16 +1146,6 @@ public class StructureMapUtilities {
|
|||
return new StringType(lexer.processConstant(s));
|
||||
}
|
||||
|
||||
private String readConstantEM(String s, FHIRLexer lexer) throws FHIRLexerException {
|
||||
if (Utilities.isInteger(s))
|
||||
return s;
|
||||
else if (Utilities.isDecimal(s, false))
|
||||
return s;
|
||||
else if (Utilities.existsInList(s, "true", "false"))
|
||||
return s;
|
||||
else
|
||||
return lexer.processConstant(s);
|
||||
}
|
||||
|
||||
public StructureDefinition getTargetType(StructureMap map) throws FHIRException {
|
||||
boolean found = false;
|
||||
|
|
|
@ -91,24 +91,6 @@ public class StructureMapUtilitiesTest implements ITransformerServices {
|
|||
assertSerializeDeserialize(map);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSyntaxEM() throws IOException, FHIRException {
|
||||
StructureMapUtilities scu = new StructureMapUtilities(context, this);
|
||||
String fileMap = TestingUtilities.loadTestResource("r5", "structure-mapping", "syntax.map");
|
||||
System.out.println(fileMap);
|
||||
|
||||
Element structureMap = scu.parseEM(fileMap, "Syntax", null);
|
||||
// assertSerializeDeserialize(structureMap);
|
||||
//
|
||||
// String renderedMap = StructureMapUtilities.render(structureMap);
|
||||
// StructureMap map = scu.parse(renderedMap, "Syntax");
|
||||
// System.out.println(map);
|
||||
// assertSerializeDeserialize(map);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public void log(String message) {
|
||||
}
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE suppressions PUBLIC
|
||||
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
|
||||
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
|
||||
|
||||
<suppressions>
|
||||
|
||||
</suppressions>
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>org.hl7.fhir.core</artifactId>
|
||||
<version>5.6.99-SNAPSHOT</version>
|
||||
<version>5.6.100-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -90,7 +90,7 @@
|
|||
<groupId>org.sonatype.plugins</groupId>
|
||||
<artifactId>nexus-staging-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
<skipStaging>true</skipStaging>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE suppressions PUBLIC
|
||||
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
|
||||
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
|
||||
|
||||
<suppressions>
|
||||
|
||||
</suppressions>
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>org.hl7.fhir.core</artifactId>
|
||||
<version>5.6.99-SNAPSHOT</version>
|
||||
<version>5.6.100-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -45,7 +45,7 @@
|
|||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>okhttp</artifactId>
|
||||
<version>4.9.0</version>
|
||||
<version>${okhttp.version}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
@ -159,7 +159,7 @@
|
|||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>logging-interceptor</artifactId>
|
||||
<version>4.9.0</version>
|
||||
<version>${okhttp.version}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
-->
|
||||
|
|
|
@ -69,6 +69,125 @@ public class MarkDownProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if this is intended to be processed as markdown
|
||||
*
|
||||
* This is a guess, based on textual analysis of the content.
|
||||
*
|
||||
* Uses of this routine:
|
||||
* In general, the main use of this is to decide to escape the string so erroneous markdown processing doesn't munge characters
|
||||
* If it's a plain string, and it's being put into something that's markdown, then you should escape the content
|
||||
* If it's markdown, but you're not sure whether to process it as markdown, this can help decide
|
||||
*
|
||||
* The underlying problem is that processing plain strings as markdown is problematic because some technical characters might
|
||||
* get lost. So it's good to escape them... but if it's meant to be markdown, then it'll get trashed.
|
||||
*
|
||||
* This method works by looking for character patterns that are unlikely to occur outside markdown - but "unlikely" is all it is, so the guess can still be wrong
|
||||
*
|
||||
* @param content the string to analyse
|
||||
* @return true if the content is probably markdown
|
||||
*/
|
||||
// todo: dialect dependency?
|
||||
public boolean isProbablyMarkdown(String content, boolean mdIfParagrapghs) {
|
||||
if (mdIfParagrapghs && content.contains("\n")) {
|
||||
return true;
|
||||
}
|
||||
String[] lines = content.split("\\r?\\n");
|
||||
for (String s : lines) {
|
||||
if (s.startsWith("* ") || isHeading(s) || s.startsWith("1. ") || s.startsWith(" ")) {
|
||||
return true;
|
||||
}
|
||||
if (s.contains("```") || s.contains("~~~") || s.contains("[[[")) {
|
||||
return true;
|
||||
}
|
||||
if (hasLink(s)) {
|
||||
return true;
|
||||
}
|
||||
if (hasTextSpecial(s, '*') || hasTextSpecial(s, '_') ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isHeading(String s) {
|
||||
if (s.length() > 7 && s.startsWith("###### ") && !Character.isWhitespace(s.charAt(7))) {
|
||||
return true;
|
||||
}
|
||||
if (s.length() > 6 && s.startsWith("##### ") && !Character.isWhitespace(s.charAt(6))) {
|
||||
return true;
|
||||
}
|
||||
if (s.length() > 5 && s.startsWith("#### ") && !Character.isWhitespace(s.charAt(5))) {
|
||||
return true;
|
||||
}
|
||||
if (s.length() > 4 && s.startsWith("### ") && !Character.isWhitespace(s.charAt(4))) {
|
||||
return true;
|
||||
}
|
||||
if (s.length() > 3 && s.startsWith("## ") && !Character.isWhitespace(s.charAt(3))) {
|
||||
return true;
|
||||
}
|
||||
//
|
||||
// not sure about this one. # [string] is something that could easily arise in non-markdown,
|
||||
// so this appearing isn't enough to call it markdown
|
||||
//
|
||||
// if (s.length() > 2 && s.startsWith("# ") && !Character.isWhitespace(s.charAt(2))) {
|
||||
// return true;
|
||||
// }
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
private boolean hasLink(String s) {
|
||||
int left = -1;
|
||||
int mid = -1;
|
||||
for (int i = 0; i < s.length(); i++) {
|
||||
char c = s.charAt(i);
|
||||
if (c == '[') {
|
||||
mid = -1;
|
||||
left = i;
|
||||
} else if (left > -1 && i < s.length()-1 && c == ']' && s.charAt(i+1) == '(') {
|
||||
mid = i;
|
||||
} else if (left > -1 && c == ']') {
|
||||
left = -1;
|
||||
} else if (left > -1 && mid > -1 && c == ')') {
|
||||
return true;
|
||||
} else if (mid > -1 && c == '[' || c == ']' || (c == '(' && i > mid+1)) {
|
||||
left = -1;
|
||||
mid = -1;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}

  private boolean hasTextSpecial(String s, char c) {
    boolean second = false;
    for (int i = 0; i < s.length(); i++) {
      char prev = i == 0 ? ' ' : s.charAt(i-1);
      char next = i < s.length() - 1 ? s.charAt(i+1) : ' ';
      if (s.charAt(i) != c) {
        // nothing
      } else if (second) {
        if (Character.isWhitespace(next) && (isPunctation(prev) || Character.isLetterOrDigit(prev))) {
          return true;
        }
        second = false;
      } else {
        if (Character.isWhitespace(prev) && (isPunctation(next) || Character.isLetterOrDigit(next))) {
          second = true;
        }
      }
    }
    return false;
  }

  private boolean isPunctation(char ch) {
    return Utilities.existsInList(ch, '.', ',', '!', '?');
  }
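
  // Illustrative note, not part of this commit: the emphasis check above only fires when the
  // opening marker follows whitespace and the closing marker is followed by whitespace (or the
  // end of the line), which is why the MarkdownTests added later in this commit expect:
  //   "this is a *test string*"  -> probably markdown
  //   "this is a *test* string"  -> probably markdown
  //   "this is a t*est strin*g"  -> not markdown
  //   "this is a *test *string"  -> not markdown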

  /**
   * This deals with a painful problem created by the intersection of previous publishing processes
   * and the way commonmark specifies that < is handled in content. For control reasons, the FHIR specification does

@ -214,10 +214,18 @@ public class VersionUtilities {
|
|||
} else if (Utilities.charCount(version, '.') == 2) {
|
||||
String[] p = version.split("\\.");
|
||||
return p[0]+"."+p[1];
|
||||
} else {
|
||||
return null;
|
||||
} else if (Utilities.existsInList(version, "R2", "R2B", "R3", "R4", "R4B", "R5")) {
|
||||
switch (version) {
|
||||
case "R2": return "1.0";
|
||||
case "R2B": return "1.4";
|
||||
case "R3": return "3.0";
|
||||
case "R4": return "4.0";
|
||||
case "R4B": return "4.3";
|
||||
case "R5": return "5.0";
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
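// Illustrative note, not part of this commit: assuming this is VersionUtilities.getMajMin (the
// method Params calls further down in this commit), the branch added above means release labels
// resolve alongside numeric versions, e.g.
//   getMajMin("4.0.1") -> "4.0"   (existing behaviour)
//   getMajMin("R4B")   -> "4.3"
//   getMajMin("R5")    -> "5.0"
// and unrecognised values still fall through to the trailing return null.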
|
||||
|
||||
public static String getPatch(String version) {
|
||||
if (version == null)
|
||||
|
|
|
@ -774,6 +774,44 @@ public class I18nConstants {
|
|||
public static final String EXT_VER_URL_REVERSION = "EXT_VER_URL_REVERSION";
|
||||
public static final String ILLEGAL_COMMENT_TYPE = "ILLEGAL_COMMENT_TYPE";
|
||||
public static final String SD_NO_SLICING_ON_ROOT = "SD_NO_SLICING_ON_ROOT";
|
||||
public static final String REFERENCE_REF_QUERY_INVALID = "REFERENCE_REF_QUERY_INVALID";
|
||||
public static final String SM_RULEGROUP_NOT_FOUND = "SM_RULEGROUP_NOT_FOUND";
|
||||
public static final String SM_NAME_INVALID = "SM_NAME_INVALID";
|
||||
public static final String SM_GROUP_INPUT_DUPLICATE = "SM_GROUP_INPUT_DUPLICATE";
|
||||
public static final String SM_GROUP_INPUT_MODE_INVALID = "SM_GROUP_INPUT_MODE_INVALID";
|
||||
public static final String SM_GROUP_INPUT_NO_TYPE = "SM_GROUP_INPUT_NO_TYPE";
|
||||
public static final String SM_GROUP_INPUT_TYPE_NOT_DECLARED = "SM_GROUP_INPUT_TYPE_NOT_DECLARED";
|
||||
public static final String SM_GROUP_INPUT_MODE_MISMATCH = "SM_GROUP_INPUT_MODE_MISMATCH";
|
||||
public static final String SM_GROUP_INPUT_TYPE_UNKNOWN_STRUCTURE = "SM_GROUP_INPUT_TYPE_UNKNOWN_STRUCTURE";
|
||||
public static final String SM_GROUP_INPUT_TYPE_UNKNOWN_TYPE = "SM_GROUP_INPUT_TYPE_UNKNOWN_TYPE";
|
||||
public static final String SM_SOURCE_CONTEXT_UNKNOWN = "SM_SOURCE_CONTEXT_UNKNOWN";
|
||||
public static final String SM_SOURCE_PATH_INVALID = "SM_SOURCE_PATH_INVALID";
|
||||
public static final String SM_RULE_SOURCE_MIN_REDUNDANT = "SM_RULE_SOURCE_MIN_REDUNDANT";
|
||||
public static final String SM_RULE_SOURCE_MAX_REDUNDANT = "SM_RULE_SOURCE_MAX_REDUNDANT";
|
||||
public static final String SM_RULE_SOURCE_LISTMODE_REDUNDANT = "SM_RULE_SOURCE_LISTMODE_REDUNDANT";
|
||||
public static final String SM_TARGET_CONTEXT_UNKNOWN = "SM_TARGET_CONTEXT_UNKNOWN";
|
||||
public static final String SM_TARGET_PATH_INVALID = "SM_TARGET_PATH_INVALID";
|
||||
public static final String SM_NO_LIST_MODE_NEEDED = "SM_NO_LIST_MODE_NEEDED";
|
||||
public static final String SM_NO_LIST_RULE_ID_NEEDED = "SM_NO_LIST_RULE_ID_NEEDED";
|
||||
public static final String SM_LIST_RULE_ID_ONLY_WHEN_SHARE = "SM_LIST_RULE_ID_ONLY_WHEN_SHARE";
|
||||
public static final String SM_RULE_SOURCE_UNASSIGNED = "SM_RULE_SOURCE_UNASSIGNED";
|
||||
public static final String SM_TARGET_PATH_MULTIPLE_MATCHES = "SM_TARGET_PATH_MULTIPLE_MATCHES";
|
||||
public static final String SM_SOURCE_TYPE_INVALID = "SM_SOURCE_TYPE_INVALID";
|
||||
public static final String SM_TARGET_TRANSFORM_PARAM_COUNT_SINGLE = "SM_TARGET_TRANSFORM_PARAM_COUNT_SINGLE";
|
||||
public static final String SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE = "SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE";
|
||||
public static final String SM_TARGET_TRANSFORM_NOT_CHECKED = "SM_TARGET_TRANSFORM_NOT_CHECKED";
|
||||
public static final String SM_TARGET_NO_TRANSFORM_NO_CHECKED = "SM_TARGET_NO_TRANSFORM_NO_CHECKED";
|
||||
public static final String SM_TARGET_TRANSFORM_TYPE_UNPROCESSIBLE = "SM_TARGET_TRANSFORM_TYPE_UNPROCESSIBLE";
|
||||
public static final String SM_TARGET_TRANSFORM_PARAM_UNPROCESSIBLE = "SM_TARGET_TRANSFORM_PARAM_UNPROCESSIBLE";
|
||||
public static final String SM_TARGET_TRANSFORM_EXPRESSION_ERROR = "SM_TARGET_TRANSFORM_EXPRESSION_ERROR";
|
||||
public static final String SM_IMPORT_NOT_FOUND = "SM_IMPORT_NOT_FOUND";
|
||||
public static final String SM_TARGET_TYPE_MULTIPLE_POSSIBLE = "SM_TARGET_TYPE_MULTIPLE_POSSIBLE";
|
||||
public static final String SM_DEPENDENT_PARAM_MODE_MISMATCH = "SM_DEPENDENT_PARAM_MODE_MISMATCH";
|
||||
public static final String SM_DEPENDENT_PARAM_TYPE_MISMATCH = "SM_DEPENDENT_PARAM_TYPE_MISMATCH";
|
||||
public static final String SM_ORPHAN_GROUP = "SM_ORPHAN_GROUP";
|
||||
public static final String SM_SOURCE_TYPE_NOT_FOUND = "SM_SOURCE_TYPE_NOT_FOUND";
|
||||
public static final String SM_TARGET_TYPE_NOT_FOUND = "SM_TARGET_TYPE_NOT_FOUND";
|
||||
public static final String SM_MATCHING_RULEGROUP_NOT_FOUND = "SM_MATCHING_RULEGROUP_NOT_FOUND";
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -27,19 +27,13 @@ public class PackageClient {
|
|||
|
||||
|
||||
private PackageServer server;
|
||||
private String cacheFolder;
|
||||
private String address;
|
||||
|
||||
|
||||
public PackageClient(PackageServer server) {
|
||||
super();
|
||||
this.server = server;
|
||||
address = server.getUrl();
|
||||
try {
|
||||
cacheFolder = Utilities.path(System.getProperty("user.home"), ".fhir", "package-client");
|
||||
Utilities.createDirectory(cacheFolder);
|
||||
} catch (IOException e) {
|
||||
}
|
||||
address = this.server.getUrl();
|
||||
}
|
||||
|
||||
public boolean exists(String id, String ver) throws IOException {
|
||||
|
|
|
@ -72,22 +72,35 @@ public class Turtle {
|
|||
protected List<Predicate> predicates = new ArrayList<Predicate>();
|
||||
|
||||
public Complex predicate(String predicate, String object) {
|
||||
return predicate(predicate, object, false);
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, String object, boolean asList) {
|
||||
predicateSet.add(predicate);
|
||||
objectSet.add(object);
|
||||
return predicate(predicate, new StringType(object));
|
||||
return predicate(predicate, new StringType(object), asList);
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String object, String link, String comment) {
|
||||
return linkedPredicate(predicate, object, link, comment, false);
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String object, String link, String comment, boolean asList) {
|
||||
predicateSet.add(predicate);
|
||||
objectSet.add(object);
|
||||
return linkedPredicate(predicate, new StringType(object), link, comment);
|
||||
return linkedPredicate(predicate, new StringType(object), link, comment, asList);
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, Triple object) {
|
||||
return predicate(predicate, object, false);
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, Triple object, boolean asList) {
|
||||
Predicate p = getPredicate(predicate);
|
||||
if (p == null) {
|
||||
p = new Predicate();
|
||||
p.predicate = predicate;
|
||||
p.asList = asList;
|
||||
predicateSet.add(predicate);
|
||||
predicates.add(p);
|
||||
}
|
||||
|
@ -105,12 +118,17 @@ public class Turtle {
|
|||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, Triple object, String link, String comment) {
|
||||
return linkedPredicate(predicate, object, link, comment, false);
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, Triple object, String link, String comment, boolean asList) {
|
||||
Predicate p = getPredicate(predicate);
|
||||
if (p == null) {
|
||||
p = new Predicate();
|
||||
p.predicate = predicate;
|
||||
p.link = link;
|
||||
p.comment = comment;
|
||||
p.asList = asList;
|
||||
predicateSet.add(predicate);
|
||||
predicates.add(p);
|
||||
}
|
||||
|
@ -121,16 +139,24 @@ public class Turtle {
|
|||
}
|
||||
|
||||
public Complex predicate(String predicate) {
|
||||
return predicate(predicate, false);
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, boolean asList) {
|
||||
predicateSet.add(predicate);
|
||||
Complex c = complex();
|
||||
predicate(predicate, c);
|
||||
predicate(predicate, c, asList);
|
||||
return c;
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String link, String comment) {
|
||||
return linkedPredicate(predicate, link, comment, false);
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String link, String comment, boolean asList) {
|
||||
predicateSet.add(predicate);
|
||||
Complex c = complex();
|
||||
linkedPredicate(predicate, c, link, comment);
|
||||
linkedPredicate(predicate, c, link, comment, asList);
|
||||
return c;
|
||||
}
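// Illustrative note, not part of this commit: the asList flag threaded through these overloads is
// consumed by the writer further down in this diff - when asList is true, the objects of the
// predicate are wrapped as an RDF collection, e.g. (with an arbitrary predicate and objects)
//   fhir:link ( <a> <b> <c> ) ;
// instead of the default comma-separated form
//   fhir:link <a>, <b>, <c> ;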
|
||||
|
||||
|
@ -144,6 +170,7 @@ public class Turtle {
|
|||
protected String link;
|
||||
protected List<Triple> objects = new ArrayList<Turtle.Triple>();
|
||||
protected String comment;
|
||||
protected boolean asList = false;
|
||||
|
||||
public String getPredicate() {
|
||||
return predicate;
|
||||
|
@ -183,15 +210,15 @@ public class Turtle {
|
|||
|
||||
public void comment(String comment) {
|
||||
if (!Utilities.noString(comment)) {
|
||||
predicate("rdfs:comment", literal(comment));
|
||||
predicate("dcterms:description", literal(comment));
|
||||
predicate("rdfs:comment", literal(comment), false);
|
||||
predicate("dcterms:description", literal(comment), false);
|
||||
}
|
||||
}
|
||||
|
||||
public void label(String label) {
|
||||
if (!Utilities.noString(label)) {
|
||||
predicate("rdfs:label", literal(label));
|
||||
predicate("dc:title", literal(label));
|
||||
predicate("rdfs:label", literal(label), false);
|
||||
predicate("dc:title", literal(label), false);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -445,7 +472,7 @@ public class Turtle {
|
|||
writer.ln();
|
||||
if (!section.comments.isEmpty()) {
|
||||
for (String s : section.comments) {
|
||||
writer.ln("# "+s);
|
||||
writer.ln("# "+formatMultilineComment(s));
|
||||
}
|
||||
writer.ln();
|
||||
}
|
||||
|
@ -464,10 +491,13 @@ public class Turtle {
|
|||
writer.write(" ");
|
||||
boolean first = true;
|
||||
for (Triple o : p.getObjects()) {
|
||||
if (first)
|
||||
if (first) {
|
||||
first = false;
|
||||
else
|
||||
writer.write(", ");
|
||||
if (p.asList) writer.write("( ");
|
||||
} else {
|
||||
if (!p.asList) writer.write(", ");
|
||||
else writer.write(" ");
|
||||
}
|
||||
if (o instanceof StringType)
|
||||
writer.write(((StringType) o).value);
|
||||
else {
|
||||
|
@ -478,10 +508,11 @@ public class Turtle {
|
|||
writer.write("]");
|
||||
}
|
||||
}
|
||||
String comment = p.comment == null? "" : " # "+p.comment;
|
||||
String comment = p.comment == null? "" : " # "+formatMultilineComment(p.comment);
|
||||
if (p.asList) writer.write(" )");
|
||||
i++;
|
||||
if (i < sbj.predicates.size())
|
||||
writer.write(";"+comment+"\r\n ");
|
||||
writer.write(" ;"+comment+"\r\n ");
|
||||
else {
|
||||
if (Utilities.noString(sbj.id))
|
||||
writer.write("]");
|
||||
|
@ -491,6 +522,11 @@ public class Turtle {
|
|||
}
|
||||
}
|
||||
|
||||
private String formatMultilineComment(String s) {
|
||||
return s.replace("\n", "\n#");
|
||||
}
|
||||
|
||||
|
||||
private void commitSection(StringBuilder b, Section section) throws Exception {
|
||||
b.append("# - "+section.name+" "+Utilities.padLeft("", '-', 75-section.name.length())+"\r\n");
|
||||
b.append("\r\n");
|
||||
|
@ -511,10 +547,12 @@ public class Turtle {
|
|||
b.append(" ");
|
||||
boolean first = true;
|
||||
for (Triple o : p.getObjects()) {
|
||||
if (first)
|
||||
if (first) {
|
||||
first = false;
|
||||
else
|
||||
b.append(", ");
|
||||
if (p.asList) b.append("( ");
|
||||
} else
|
||||
if (!p.asList) b.append(", ");
|
||||
else b.append(" ");
|
||||
if (o instanceof StringType)
|
||||
b.append(Utilities.escapeXml(((StringType) o).value));
|
||||
else {
|
||||
|
@ -525,12 +563,13 @@ public class Turtle {
|
|||
b.append("]");
|
||||
}
|
||||
}
|
||||
String comment = p.comment == null? "" : " # "+p.comment;
|
||||
String comment = p.comment == null? "" : " # "+formatMultilineComment(p.comment);
|
||||
if (p.asList) b.append(" )");
|
||||
i++;
|
||||
if (i < sbj.predicates.size())
|
||||
b.append(";"+Utilities.escapeXml(comment)+"\r\n ");
|
||||
b.append(" ;"+Utilities.escapeXml(comment)+"\r\n ");
|
||||
else
|
||||
b.append("."+Utilities.escapeXml(comment)+"\r\n\r\n");
|
||||
b.append(" ."+Utilities.escapeXml(comment)+"\r\n\r\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -566,8 +605,11 @@ public class Turtle {
|
|||
if (first) {
|
||||
first = false;
|
||||
writer.write(left+" "+po.getPredicate()+" ");
|
||||
} else
|
||||
writer.write(", ");
|
||||
if (po.asList) writer.write("( ");
|
||||
} else {
|
||||
if (!po.asList) writer.write(", ");
|
||||
else writer.write(" ");
|
||||
}
|
||||
if (o instanceof StringType)
|
||||
writer.write(((StringType) o).value);
|
||||
else {
|
||||
|
@ -578,9 +620,10 @@ public class Turtle {
|
|||
writer.write(" ]");
|
||||
}
|
||||
}
|
||||
if (po.asList) writer.write(" )");
|
||||
i++;
|
||||
if (i < complex.predicates.size())
|
||||
writer.write(";");
|
||||
writer.write(" ;");
|
||||
if (!Utilities.noString(po.comment))
|
||||
writer.write(" # "+escape(po.comment, false));
|
||||
}
|
||||
|
@ -602,9 +645,11 @@ public class Turtle {
|
|||
for (Triple o : po.getObjects()) {
|
||||
if (first) {
|
||||
first = false;
|
||||
b.append(left+" "+po.makelink()+" ");
|
||||
if (po.asList) b.append(left+"( ");
|
||||
b.append(po.makelink()+" ");
|
||||
} else
|
||||
b.append(", ");
|
||||
if (!po.asList) b.append(", ");
|
||||
else b.append(" ");
|
||||
if (o instanceof StringType)
|
||||
b.append(Utilities.escapeXml(((StringType) o).value));
|
||||
else {
|
||||
|
@ -615,9 +660,10 @@ public class Turtle {
|
|||
b.append(" ]");
|
||||
}
|
||||
}
|
||||
if (po.asList) b.append(" )");
|
||||
i++;
|
||||
if (i < complex.predicates.size())
|
||||
b.append(";");
|
||||
b.append(" ;");
|
||||
if (!Utilities.noString(po.comment))
|
||||
b.append(" # "+Utilities.escapeXml(escape(po.comment, false)));
|
||||
}
|
||||
|
|
|
@ -83,9 +83,14 @@ import org.commonmark.renderer.html.HtmlRenderer;
|
|||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.utilities.TranslatingUtilities;
|
||||
import org.hl7.fhir.utilities.Utilities;
|
||||
import org.hl7.fhir.utilities.xhtml.HierarchicalTableGenerator.TableGenerationMode;
|
||||
|
||||
|
||||
public class HierarchicalTableGenerator extends TranslatingUtilities {
|
||||
public enum TableGenerationMode {
|
||||
XML, XHTML
|
||||
}
|
||||
|
||||
public static final String TEXT_ICON_REFERENCE = "Reference to another Resource";
|
||||
public static final String TEXT_ICON_PRIMITIVE = "Primitive Data Type";
|
||||
public static final String TEXT_ICON_KEY = "JSON Key Value";
|
||||
|
@ -599,6 +604,8 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
*/
|
||||
private boolean inLineGraphics;
|
||||
|
||||
private TableGenerationMode mode;
|
||||
|
||||
public HierarchicalTableGenerator() {
|
||||
super();
|
||||
}
|
||||
|
@ -626,7 +633,9 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
checkSetup();
|
||||
}
|
||||
|
||||
public TableModel initNormalTable(String prefix, boolean isLogical, boolean alternating, String id, boolean isActive) {
|
||||
public TableModel initNormalTable(String prefix, boolean isLogical, boolean alternating, String id, boolean isActive, TableGenerationMode mode) {
|
||||
this.mode = mode;
|
||||
|
||||
TableModel model = new TableModel(id, isActive);
|
||||
|
||||
model.setAlternating(alternating);
|
||||
|
@ -686,12 +695,16 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
tr.setAttribute("style", "border: " + Integer.toString(1 + border) + "px #F0F0F0 solid; font-size: 11px; font-family: verdana; vertical-align: top");
|
||||
XhtmlNode tc = null;
|
||||
for (Title t : model.getTitles()) {
|
||||
tc = renderCell(tr, t, "th", null, null, null, false, null, "white", 0, imagePath, border, outputTracker, model, null);
|
||||
tc = renderCell(tr, t, "th", null, null, null, false, null, "white", 0, imagePath, border, outputTracker, model, null, true);
|
||||
if (t.width != 0)
|
||||
tc.setAttribute("style", "width: "+Integer.toString(t.width)+"px");
|
||||
}
|
||||
if (tc != null && model.getDocoRef() != null) {
|
||||
XhtmlNode img = tc.addTag("span").setAttribute("style", "float: right").addTag("a").setAttribute("title", "Legend for this format").setAttribute("href", model.getDocoRef()).addTag("img");
|
||||
XhtmlNode a = tc.addTag("span").setAttribute("style", "float: right").addTag("a").setAttribute("title", "Legend for this format").setAttribute("href", model.getDocoRef());
|
||||
if (mode == TableGenerationMode.XHTML) {
|
||||
a.setAttribute("no-external", "true");
|
||||
}
|
||||
XhtmlNode img = a.addTag("img");
|
||||
img.setAttribute("alt", "doco").setAttribute("style", "background-color: inherit").setAttribute("src", model.getDocoImg());
|
||||
if (model.isActive()) {
|
||||
img.setAttribute("onLoad", "fhirTableInit(this)");
|
||||
|
@ -732,7 +745,7 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
}
|
||||
boolean first = true;
|
||||
for (Cell t : r.getCells()) {
|
||||
renderCell(tr, t, "td", first ? r.getIcon() : null, first ? r.getHint() : null, first ? indents : null, !r.getSubRows().isEmpty(), first ? r.getAnchor() : null, color, r.getLineColor(), imagePath, border, outputTracker, model, r);
|
||||
renderCell(tr, t, "td", first ? r.getIcon() : null, first ? r.getHint() : null, first ? indents : null, !r.getSubRows().isEmpty(), first ? r.getAnchor() : null, color, r.getLineColor(), imagePath, border, outputTracker, model, r, first);
|
||||
first = false;
|
||||
}
|
||||
table.addText("\r\n");
|
||||
|
@ -751,7 +764,7 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
}
|
||||
|
||||
|
||||
private XhtmlNode renderCell(XhtmlNode tr, Cell c, String name, String icon, String hint, List<Integer> indents, boolean hasChildren, String anchor, String color, int lineColor, String imagePath, int border, Set<String> outputTracker, TableModel table, Row row) throws IOException {
|
||||
private XhtmlNode renderCell(XhtmlNode tr, Cell c, String name, String icon, String hint, List<Integer> indents, boolean hasChildren, String anchor, String color, int lineColor, String imagePath, int border, Set<String> outputTracker, TableModel table, Row row, boolean suppressExternals) throws IOException {
|
||||
XhtmlNode tc = tr.addTag(name);
|
||||
tc.setAttribute("class", "hierarchy");
|
||||
if (c.span > 1) {
|
||||
|
@ -837,6 +850,9 @@ public class HierarchicalTableGenerator extends TranslatingUtilities {
|
|||
} else if (!Utilities.noString(p.getReference())) {
|
||||
XhtmlNode a = addStyle(tc.addTag("a"), p);
|
||||
a.setAttribute("href", p.getReference());
|
||||
if (mode == TableGenerationMode.XHTML && suppressExternals) {
|
||||
a.setAttribute("no-external", "true");
|
||||
}
|
||||
if (!Utilities.noString(p.getHint()))
|
||||
a.setAttribute("title", p.getHint());
|
||||
if (p.getText() != null) {
|
||||
|
|
|
@ -150,7 +150,9 @@ public abstract class XhtmlFluent {
|
|||
|
||||
|
||||
public XhtmlNode code(String text) {
|
||||
return addTag("code").tx(text);
|
||||
XhtmlNode code = addTag("code");
|
||||
code.tx(text);
|
||||
return code;
|
||||
}
|
||||
|
||||
public XhtmlNode code() {
|
||||
|
|
|
@ -824,5 +824,45 @@ EXT_VER_URL_NOT_ALLOWED = The extension URL must not contain a version
|
|||
EXT_VER_URL_REVERSION = The extension URL must not contain a version. The extension was validated against version {0} of the extension
|
||||
ILLEGAL_COMMENT_TYPE = The fhir_comments property must be an array of strings
|
||||
SD_NO_SLICING_ON_ROOT = Slicing is not allowed at the root of a profile
|
||||
REFERENCE_REF_QUERY_INVALID = The query part of the conditional reference is not a valid query string ({0})
|
||||
SM_RULEGROUP_NOT_FOUND = The group {0} could not be resolved
|
||||
SM_NAME_INVALID = The name {0} is not valid
|
||||
SM_GROUP_INPUT_DUPLICATE = The name {0} is already used
|
||||
SM_GROUP_INPUT_MODE_INVALID = The group parameter {0} mode {1} isn''t valid
|
||||
SM_GROUP_INPUT_NO_TYPE = The group parameter {0} has no type, so the paths cannot be validated
|
||||
SM_GROUP_INPUT_TYPE_NOT_DECLARED = The type {0} is not declared and is unknown
|
||||
SM_GROUP_INPUT_MODE_MISMATCH = The type {0} has mode {1} which doesn''t match the structure definition {2}
|
||||
SM_GROUP_INPUT_TYPE_UNKNOWN_STRUCTURE = The type {0} which maps to the canonical URL {1} is not known, so the paths cannot be validated
|
||||
SM_GROUP_INPUT_TYPE_UNKNOWN_TYPE = The type {0} is not known, so the paths cannot be validated
|
||||
SM_SOURCE_CONTEXT_UNKNOWN = The source context {0} is not known at this point
|
||||
SM_SOURCE_PATH_INVALID = The source path {0}.{1} refers to the path {2} which is unknown
|
||||
SM_RULE_SOURCE_MIN_REDUNDANT = The min value of {0} is redundant since the valid min is {0}
|
||||
SM_RULE_SOURCE_MAX_REDUNDANT = The max value of {0} is redundant since the valid max is {0}
|
||||
SM_RULE_SOURCE_LISTMODE_REDUNDANT = The listMode value of {0} is redundant since the valid max is {0}
|
||||
SM_TARGET_CONTEXT_UNKNOWN = The target context {0} is not known at this point
|
||||
SM_TARGET_PATH_INVALID = The target path {0}.{1} refers to the path {2} which is unknown
|
||||
SM_NO_LIST_MODE_NEEDED = A list mode should not be provided since this is a rule that can only be executed once
|
||||
SM_NO_LIST_RULE_ID_NEEDED = A list ruleId should not be provided since this is a rule that can only be executed once
|
||||
SM_LIST_RULE_ID_ONLY_WHEN_SHARE = A ruleId should only be provided when the rule mode is ''share''
|
||||
SM_RULE_SOURCE_UNASSIGNED = The source statement doesn''t assign a variable to the source - check that this is what is intended
|
||||
SM_TARGET_PATH_MULTIPLE_MATCHES = The target path {0}.{1} refers to the path {2} which could be a reference to multiple elements ({3}). No further checking can be performed
|
||||
SM_SOURCE_TYPE_INVALID = The type {0} is not valid in this source context {1}. The possible types are [{2}]
|
||||
SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE = Transform {0} takes {1}-{2} parameter(s) but {3} were found
|
||||
SM_TARGET_TRANSFORM_PARAM_COUNT_SINGLE = Transform {0} takes {1} parameter(s) but {2} were found
|
||||
SM_TARGET_TRANSFORM_NOT_CHECKED = Transform {0} not checked yet
|
||||
SM_TARGET_NO_TRANSFORM_NO_CHECKED = When there is no transform, parameters can''t be provided
|
||||
SM_TARGET_TRANSFORM_TYPE_UNPROCESSIBLE = The value of the type parameter could not be processed
|
||||
SM_TARGET_TRANSFORM_PARAM_UNPROCESSIBLE = The parameter at index {0} could not be processed (type = {1})
|
||||
SM_TARGET_TRANSFORM_EXPRESSION_ERROR = The FHIRPath expression passed as the evaluate parameter is invalid: {0}
|
||||
SM_IMPORT_NOT_FOUND = No maps were found to match {0} - validation may be wrong
|
||||
SM_TARGET_TYPE_MULTIPLE_POSSIBLE = Multiple types are possible here ({0}) so further type checking is not possible
|
||||
SM_DEPENDENT_PARAM_MODE_MISMATCH = The parameter {0} refers to the variable {1} but its mode is {2} which is not the same as the mode required for the group {3}
|
||||
SM_DEPENDENT_PARAM_TYPE_MISMATCH = The parameter {0} refers to the variable {1} but its type is {2} which is not compatible with the type required for the group {3}
|
||||
SM_ORPHAN_GROUP = This group is not called from within this mapping script, and does not have types on its inputs, so type verification is not possible
|
||||
SM_SOURCE_TYPE_NOT_FOUND = No source type was found, so the default group for this implied dependent rule could not be determined
|
||||
SM_TARGET_TYPE_NOT_FOUND = No target type was found, so the default group for this implied dependent rule could not be determined
|
||||
SM_MATCHING_RULEGROUP_NOT_FOUND = Unable to find a default rule for the type pair source={0} and target={1}
|
||||
|
||||
|
||||
|
||||
|
|
@ -30,7 +30,7 @@ public class FTPClientTest implements ResourceLoaderTests {
|
|||
public static final String DUMMY_FILE_TO_DELETE = "dummyFileToDelete";
|
||||
|
||||
public static final String DUMMY_FILE_TO_UPLOAD = "dummyFileToUpload";
|
||||
public static final int FAKE_FTP_PORT = 8021;
|
||||
public static final int FAKE_FTP_PORT = 8022;
|
||||
public static final String DUMMY_FILE_CONTENT = "Dummy file content\nMore content\n";
|
||||
public static final String LOCALHOST = "localhost";
|
||||
|
||||
|
|
|
@ -2,15 +2,72 @@ package org.hl7.fhir.utilities;
|
|||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import org.hl7.fhir.utilities.MarkDownProcessor.Dialect;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
class MarkdownTests {
|
||||
|
||||
@Test
|
||||
void testMarkdownDetection() {
|
||||
testMarkdown("this is a test string", false);
|
||||
testMarkdown("this is a \r\ntest string", false);
|
||||
testMarkdown("this is a \r\ntest string", true, true);
|
||||
testMarkdown("this is a t*est strin*g", false);
|
||||
testMarkdown("this is a *test strin*g", false);
|
||||
testMarkdown("this is a *test string*", true);
|
||||
testMarkdown("this is a *test *string", false);
|
||||
testMarkdown("this is a *test* string", true);
|
||||
testMarkdown("this [is] a test string", false);
|
||||
testMarkdown("this [is](link) a test string", true);
|
||||
testMarkdown("this [is](link a test string", false);
|
||||
testMarkdown("this [is] (link) a test string", false);
|
||||
testMarkdown("this [is(link)] a test string", false);
|
||||
testMarkdown("this [is](link a test string", false);
|
||||
testMarkdown("this [i]s] (link) a test string", false);
|
||||
testMarkdown("## heading", true);
|
||||
testMarkdown("# heading", false);
|
||||
testMarkdown("## heading", false);
|
||||
testMarkdown("###", false);
|
||||
}
|
||||
|
||||
private void testMarkdown(String content, boolean isMD) {
|
||||
testMarkdown(content, isMD, false);
|
||||
}
|
||||
|
||||
private void testMarkdown(String content, boolean isMD, boolean ifLines) {
|
||||
boolean test = new MarkDownProcessor(Dialect.COMMON_MARK).isProbablyMarkdown(content, ifLines);
|
||||
assertEquals(isMD, test);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStringToMarkdown() {
|
||||
// first, we test the need for replacing
|
||||
Assertions.assertEquals("<p>This is a string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is a string", null).trim());
|
||||
Assertions.assertEquals("<p>This is *a string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a string", null).trim());
|
||||
Assertions.assertNotEquals("<p>This is *a* string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a* string", null).trim());
|
||||
Assertions.assertEquals("<p>This is *a *string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a *string", null).trim());
|
||||
|
||||
Assertions.assertNotEquals("<p>This genomic study analyzes CYP2D6*1 and CYP2D6*2</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This genomic study analyzes CYP2D6*1 and CYP2D6*2", null).trim());
|
||||
Assertions.assertEquals("<p>This genomic study analyzes CYP2D6*1 and CYP2D6*2</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This genomic study analyzes CYP2D6*1 and CYP2D6\\*2", null).trim());
|
||||
|
||||
|
||||
Assertions.assertEquals("This is \\*a test\\*", MarkDownProcessor.makeStringSafeAsMarkdown("This is *a test*"));
|
||||
Assertions.assertEquals("This is *a test*", MarkDownProcessor.makeMarkdownForString("This is \\*a test\\*"));
|
||||
}
|
||||
|
||||
}
|
||||
//
|
||||
//case '*':
|
||||
//case '&':
|
||||
//case '#':
|
||||
//case '[':
|
||||
//case '>':
|
||||
//case '<':
|
||||
//case '`':
|
||||
// -
|
||||
// |
|
||||
// :
|
||||
// ~
|
||||
// ^
|
||||
// =
|
|
@ -0,0 +1,8 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE suppressions PUBLIC
|
||||
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
|
||||
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
|
||||
|
||||
<suppressions>
|
||||
|
||||
</suppressions>
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>org.hl7.fhir.core</artifactId>
|
||||
<version>5.6.99-SNAPSHOT</version>
|
||||
<version>5.6.100-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -225,7 +225,7 @@
|
|||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>mockwebserver</artifactId>
|
||||
<version>4.9.0</version>
|
||||
<version>${okhttp.version}</version>
|
||||
<optional>false</optional>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
|
@ -273,7 +273,7 @@
|
|||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>okhttp</artifactId>
|
||||
<version>4.9.0</version>
|
||||
<version>${okhttp.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.atlassian.commonmark</groupId>
|
||||
|
@ -310,7 +310,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<version>3.1.2</version>
|
||||
<version>${maven-jar-plugin.version}</version>
|
||||
<configuration>
|
||||
<archive>
|
||||
<manifest>
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE suppressions PUBLIC
|
||||
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
|
||||
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
|
||||
|
||||
<suppressions>
|
||||
|
||||
</suppressions>
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>org.hl7.fhir.core</artifactId>
|
||||
<version>5.6.99-SNAPSHOT</version>
|
||||
<version>5.6.100-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -154,7 +154,7 @@
|
|||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>okhttp</artifactId>
|
||||
<version>4.9.0</version>
|
||||
<version>${okhttp.version}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
|
|
@ -1065,7 +1065,7 @@ public class BaseValidator implements IValidationContextResourceLoader {
|
|||
if (!context.getResourceNames().contains(tn)) {
|
||||
return false;
|
||||
} else {
|
||||
return q.matches("([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+))*");
|
||||
return q.matches("([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*");
|
||||
}
|
||||
}
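// Illustrative note, not part of this commit: relaxing "+" to "*" in the value part of the pattern
// means conditional reference queries with empty parameter values are now accepted, e.g.
//   "identifier=http://acme.org/mrn|12345"  - matched before and after this change
//   "identifier="                           - only matched after this change
// while strings with malformed name=value pairs still fail the match.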
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ import org.hl7.fhir.convertors.factory.VersionConvertorFactory_30_50;
|
|||
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_40_50;
|
||||
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.r5.context.IWorkerContext.IContextResourceLoader;
|
||||
import org.hl7.fhir.r5.context.SimpleWorkerContext;
|
||||
import org.hl7.fhir.r5.elementmodel.Manager;
|
||||
import org.hl7.fhir.r5.formats.JsonParser;
|
||||
|
@ -112,7 +113,9 @@ public class IgLoader {
|
|||
if (!srcPackage.contains("#")) {
|
||||
System.out.print("#" + npm.version());
|
||||
}
|
||||
int count = getContext().loadFromPackage(npm, ValidatorUtils.loaderForVersion(npm.fhirVersion()));
|
||||
IContextResourceLoader loader = ValidatorUtils.loaderForVersion(npm.fhirVersion());
|
||||
loader.setPatchUrls(VersionUtilities.isCorePackage(npm.id()));
|
||||
int count = getContext().loadFromPackage(npm, loader);
|
||||
System.out.println(" - " + count + " resources (" + getContext().clock().milestone() + ")");
|
||||
} else {
|
||||
System.out.print(" Load " + srcPackage);
|
||||
|
@ -183,8 +186,10 @@ public class IgLoader {
|
|||
res.cntType = Manager.FhirFormat.TURTLE;
|
||||
else if (t.getKey().endsWith(".shc"))
|
||||
res.cntType = Manager.FhirFormat.SHC;
|
||||
else if (t.getKey().endsWith(".txt") || t.getKey().endsWith(".map"))
|
||||
else if (t.getKey().endsWith(".txt"))
|
||||
res.cntType = Manager.FhirFormat.TEXT;
|
||||
else if (t.getKey().endsWith(".fml") || t.getKey().endsWith(".map"))
|
||||
res.cntType = Manager.FhirFormat.FML;
|
||||
else
|
||||
throw new FHIRException("Todo: Determining resource type is not yet done");
|
||||
}
|
||||
|
@ -762,6 +767,7 @@ public class IgLoader {
|
|||
if (isDebug() || ((e.getMessage() != null && e.getMessage().contains("cannot be cast")))) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
e.printStackTrace();
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
@ -774,7 +780,7 @@ public class IgLoader {
|
|||
res = new org.hl7.fhir.dstu3.formats.XmlParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
res = new org.hl7.fhir.dstu3.formats.JsonParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
res = new org.hl7.fhir.dstu3.utils.StructureMapUtilities(null).parse(new String(content));
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -785,7 +791,7 @@ public class IgLoader {
|
|||
res = new org.hl7.fhir.r4.formats.XmlParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
res = new org.hl7.fhir.r4.formats.JsonParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
res = new org.hl7.fhir.r4.utils.StructureMapUtilities(null).parse(new String(content), fn);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -796,7 +802,7 @@ public class IgLoader {
|
|||
res = new org.hl7.fhir.r4b.formats.XmlParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
res = new org.hl7.fhir.r4b.formats.JsonParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
res = new org.hl7.fhir.r4b.utils.structuremap.StructureMapUtilities(null).parse(new String(content), fn);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -819,15 +825,15 @@ public class IgLoader {
|
|||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
r = VersionConvertorFactory_10_50.convertResource(res, new org.hl7.fhir.convertors.misc.IGR2ConvertorAdvisor5());
|
||||
} else if (fhirVersion.startsWith("5.0")) {
|
||||
} else if (fhirVersion.startsWith("5.0") || "current".equals(fhirVersion)) {
|
||||
if (fn.endsWith(".xml") && !fn.endsWith("template.xml"))
|
||||
r = new XmlParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
r = new JsonParser().parse(new ByteArrayInputStream(content));
|
||||
else if (fn.endsWith(".txt"))
|
||||
r = new StructureMapUtilities(getContext(), null, null).parse(TextFile.bytesToString(content), fn);
|
||||
else if (fn.endsWith(".map"))
|
||||
r = new StructureMapUtilities(null).parse(new String(content), fn);
|
||||
else if (fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
r = new StructureMapUtilities(context).parse(new String(content), fn);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
} else
|
||||
|
|
|
@ -43,8 +43,8 @@ public class ResourceChecker {
|
|||
if (Utilities.existsInList(ext, "ttl")) {
|
||||
return FhirFormat.TURTLE;
|
||||
}
|
||||
if (Utilities.existsInList(ext, "map")) {
|
||||
return Manager.FhirFormat.TEXT;
|
||||
if (Utilities.existsInList(ext, "map", "fml")) {
|
||||
return Manager.FhirFormat.FML;
|
||||
}
|
||||
if (Utilities.existsInList(ext, "jwt", "jws")) {
|
||||
return Manager.FhirFormat.SHC;
|
||||
|
|
|
@ -219,6 +219,8 @@ public class ValidationEngine implements IValidatorResourceFetcher, IValidationP
|
|||
@Getter @Setter private Coding jurisdiction;
|
||||
|
||||
|
||||
private ContextUtilities cu = null;
|
||||
|
||||
/**
|
||||
* Creating a validation engine is an expensive operation - takes seconds.
|
||||
* Once you have a validation engine created, you can quickly clone it to
|
||||
|
@ -842,7 +844,7 @@ public class ValidationEngine implements IValidatorResourceFetcher, IValidationP
|
|||
new org.hl7.fhir.dstu3.formats.XmlParser().setOutputStyle(org.hl7.fhir.dstu3.formats.IParser.OutputStyle.PRETTY).compose(s, res);
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
new org.hl7.fhir.dstu3.formats.JsonParser().setOutputStyle(org.hl7.fhir.dstu3.formats.IParser.OutputStyle.PRETTY).compose(s, res);
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
TextFile.stringToStream(org.hl7.fhir.dstu3.utils.StructureMapUtilities.render((org.hl7.fhir.dstu3.model.StructureMap) res), s, false);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -852,7 +854,7 @@ public class ValidationEngine implements IValidatorResourceFetcher, IValidationP
|
|||
new org.hl7.fhir.r4.formats.XmlParser().setOutputStyle(org.hl7.fhir.r4.formats.IParser.OutputStyle.PRETTY).compose(s, res);
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
new org.hl7.fhir.r4.formats.JsonParser().setOutputStyle(org.hl7.fhir.r4.formats.IParser.OutputStyle.PRETTY).compose(s, res);
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
TextFile.stringToStream(org.hl7.fhir.r4.utils.StructureMapUtilities.render((org.hl7.fhir.r4.model.StructureMap) res), s, false);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -877,7 +879,7 @@ public class ValidationEngine implements IValidatorResourceFetcher, IValidationP
|
|||
new XmlParser().setOutputStyle(org.hl7.fhir.r5.formats.IParser.OutputStyle.PRETTY).compose(s, r);
|
||||
else if (fn.endsWith(".json") && !fn.endsWith("template.json"))
|
||||
new JsonParser().setOutputStyle(org.hl7.fhir.r5.formats.IParser.OutputStyle.PRETTY).compose(s, r);
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map"))
|
||||
else if (fn.endsWith(".txt") || fn.endsWith(".map") || fn.endsWith(".fml"))
|
||||
TextFile.stringToStream(StructureMapUtilities.render((org.hl7.fhir.r5.model.StructureMap) r), s, false);
|
||||
else
|
||||
throw new FHIRException("Unsupported format for " + fn);
|
||||
|
@ -1061,6 +1063,16 @@ public class ValidationEngine implements IValidatorResourceFetcher, IValidationP
|
|||
resolvedUrls.put(type+"|"+url, false);
|
||||
return false; // todo... how to access settings from here?
|
||||
}
|
||||
if (url.contains("*") && !url.contains("?")) {
|
||||
if (cu == null) {
|
||||
cu = new ContextUtilities(context);
|
||||
}
|
||||
List<StructureMap> maps = cu.listMaps(url);
|
||||
if (!maps.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
if (fetcher != null) {
|
||||
try {
|
||||
boolean ok = fetcher.resolveURL(validator, appContext, path, url, type);
|
||||
|
|
|
@ -105,6 +105,8 @@ public class StandAloneValidatorFetcher implements IValidatorResourceFetcher, IV
|
|||
}
|
||||
if (base.equals("http://terminology.hl7.org")) {
|
||||
pid = "hl7.terminology";
|
||||
} else if (base.equals("http://hl7.org/fhir")) {
|
||||
return false;
|
||||
} else if (url.startsWith("http://hl7.org/fhir")) {
|
||||
pid = pcm.getPackageId(base);
|
||||
} else {
|
||||
|
|
|
@ -14,6 +14,7 @@ import org.hl7.fhir.validation.cli.model.HtmlInMarkdownCheck;
|
|||
public class Params {
|
||||
|
||||
public static final String VERSION = "-version";
|
||||
public static final String ALT_VERSION = "-alt-version";
|
||||
public static final String OUTPUT = "-output";
|
||||
|
||||
public static final String OUTPUT_SUFFIX = "-outputSuffix";
|
||||
|
@ -306,10 +307,30 @@ public class Params {
|
|||
String version = Common.getVersionFromIGName(null, s);
|
||||
if (version == null) {
|
||||
cliContext.addIg(s);
|
||||
} else {
|
||||
String v = getParam(args, VERSION);
|
||||
if (v != null && !v.equals(version)) {
|
||||
throw new Error("Parameters are inconsistent: specified version is "+v+" but -ig parameter "+s+" implies a different version");
|
||||
} else if (cliContext.getSv() != null && !version.equals(cliContext.getSv())) {
|
||||
throw new Error("Parameters are inconsistent: multiple -ig parameters implying different versions ("+cliContext.getSv()+","+version+")");
|
||||
} else {
|
||||
cliContext.setSv(version);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (args[i].equals(ALT_VERSION)) {
|
||||
if (i + 1 == args.length)
|
||||
throw new Error("Specified " + args[i] + " without indicating version");
|
||||
else {
|
||||
String s = args[++i];
|
||||
String v = VersionUtilities.getMajMin(s);
|
||||
if (v == null) {
|
||||
throw new Error("Unsupported FHIR Version "+s);
|
||||
}
|
||||
String pid = VersionUtilities.packageForVersion(v);
|
||||
pid = pid + "#"+VersionUtilities.getCurrentPackageVersion(v);
|
||||
cliContext.addIg(pid);
|
||||
}
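// Illustrative note, not part of this commit: the effect of -alt-version is to resolve the given
// FHIR version to its core package and load it as an additional IG; for example "-alt-version 4.0"
// resolves (via VersionUtilities.packageForVersion and getCurrentPackageVersion) to something like
// "hl7.fhir.r4.core#4.0.1" - the exact package version is looked up at runtime, so the value shown
// here is only indicative.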
|
||||
} else if (args[i].equals(MAP)) {
|
||||
if (cliContext.getMap() == null) {
|
||||
if (i + 1 == args.length)
|
||||
|
@ -336,6 +357,7 @@ public class Params {
|
|||
cliContext.addSource(args[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return cliContext;
|
||||
}
|
||||
|
||||
|
|
|
@ -189,6 +189,7 @@ import org.hl7.fhir.validation.instance.type.MeasureValidator;
|
|||
import org.hl7.fhir.validation.instance.type.QuestionnaireValidator;
|
||||
import org.hl7.fhir.validation.instance.type.SearchParameterValidator;
|
||||
import org.hl7.fhir.validation.instance.type.StructureDefinitionValidator;
|
||||
import org.hl7.fhir.validation.instance.type.StructureMapValidator;
|
||||
import org.hl7.fhir.validation.instance.type.ValueSetValidator;
|
||||
import org.hl7.fhir.validation.instance.utils.ChildIterator;
|
||||
import org.hl7.fhir.validation.instance.utils.ElementInfo;
|
||||
|
@ -3262,6 +3263,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
refType = "bundled";
|
||||
}
|
||||
}
|
||||
boolean conditional = ref.contains("?") && Utilities.existsInList(ref.substring(0, ref.indexOf("?")), context.getResourceNames());
|
||||
ReferenceValidationPolicy pol;
|
||||
if (refType.equals("contained") || refType.equals("bundled")) {
|
||||
pol = ReferenceValidationPolicy.CHECK_VALID;
|
||||
|
@ -3273,7 +3275,13 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
}
|
||||
}
|
||||
|
||||
if (pol.checkExists()) {
|
||||
if (conditional) {
|
||||
String query = ref.substring(ref.indexOf("?"));
|
||||
boolean test = !Utilities.noString(query) && query.matches("\\?([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*");
|
||||
//("^\\?([\\w-]+(=[\\w-]*)?(&[\\w-]+(=[\\w-]*)?)*)?$"),
|
||||
ok = rule(errors, "2023-02-20", IssueType.INVALID, element.line(), element.col(), path, test, I18nConstants.REFERENCE_REF_QUERY_INVALID, ref) && ok;
|
||||
|
||||
} else if (pol.checkExists()) {
|
||||
if (we == null) {
|
||||
if (!refType.equals("contained")) {
|
||||
if (fetcher == null) {
|
||||
|
@ -3766,7 +3774,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
}
|
||||
|
||||
TypedElementDefinition ted = null;
|
||||
String fp = FHIRPathExpressionFixer.fixExpr(discriminator, null);
|
||||
String fp = FHIRPathExpressionFixer.fixExpr(discriminator, null, context.getVersion());
|
||||
ExpressionNode expr = null;
|
||||
try {
|
||||
expr = fpe.parse(fp);
|
||||
|
@ -4384,7 +4392,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
}
|
||||
|
||||
try {
|
||||
n = fpe.parse(FHIRPathExpressionFixer.fixExpr(expression.toString(), null));
|
||||
n = fpe.parse(FHIRPathExpressionFixer.fixExpr(expression.toString(), null, context.getVersion()));
|
||||
} catch (FHIRLexerException e) {
|
||||
if (STACK_TRACE) e.printStackTrace();
|
||||
throw new FHIRException(context.formatMessage(I18nConstants.PROBLEM_PROCESSING_EXPRESSION__IN_PROFILE__PATH__, expression, profile.getVersionedUrl(), path, e.getMessage()));
|
||||
|
@ -4993,6 +5001,8 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
return new SearchParameterValidator(context, timeTracker, fpe, xverManager, jurisdiction).validateSearchParameter(errors, element, stack);
|
||||
} else if (element.getType().equals("StructureDefinition")) {
|
||||
return new StructureDefinitionValidator(context, timeTracker, fpe, wantCheckSnapshotUnchanged, xverManager, jurisdiction).validateStructureDefinition(errors, element, stack);
|
||||
} else if (element.getType().equals("StructureMap")) {
|
||||
return new StructureMapValidator(context, timeTracker, fpe, xverManager,profileUtilities, jurisdiction).validateStructureMap(errors, element, stack);
|
||||
} else if (element.getType().equals("ValueSet")) {
|
||||
return new ValueSetValidator(context, timeTracker, this, xverManager, jurisdiction).validateValueSet(errors, element, stack);
|
||||
} else {
|
||||
|
@ -5849,7 +5859,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
}
|
||||
|
||||
if (!ToolingExtensions.readBoolExtension(profile, "http://hl7.org/fhir/StructureDefinition/structuredefinition-xml-no-order")) {
|
||||
boolean ok = (ei.definition == null) || (ei.index >= last) || isXmlAttr;
|
||||
boolean ok = (ei.definition == null) || (ei.index >= last) || isXmlAttr || ei.getElement().isIgnorePropertyOrder();
|
||||
rule(errors, NO_RULE_DATE, IssueType.INVALID, ei.line(), ei.col(), ei.getPath(), ok, I18nConstants.VALIDATION_VAL_PROFILE_OUTOFORDER, profile.getVersionedUrl(), ei.getName(), lastei == null ? "(null)" : lastei.getName());
|
||||
}
|
||||
if (ei.slice != null && ei.index == last && ei.slice.getSlicing().getOrdered()) {
|
||||
|
@ -6010,7 +6020,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
}
|
||||
List<ValidationMessage> invErrors = null;
|
||||
// We key based on inv.expression rather than inv.key because expressions can change in derived profiles and aren't guaranteed to be consistent across profiles.
|
||||
String key = FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey());
|
||||
String key = FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey(), context.getVersion());
|
||||
if (!invMap.keySet().contains(key)) {
|
||||
invErrors = new ArrayList<ValidationMessage>();
|
||||
invMap.put(key, invErrors);
|
||||
|
@ -6064,7 +6074,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
if (n == null) {
|
||||
long t = System.nanoTime();
|
||||
try {
|
||||
String expr = FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey());
|
||||
String expr = FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey(), context.getVersion());
|
||||
n = fpe.parse(expr);
|
||||
} catch (FHIRException e) {
|
||||
rule(errors, NO_RULE_DATE, IssueType.INVARIANT, element.line(), element.col(), path, false, I18nConstants.PROBLEM_PROCESSING_EXPRESSION__IN_PROFILE__PATH__, inv.getExpression(), profile.getVersionedUrl(), path, e.getMessage());
|
||||
|
@ -6275,7 +6285,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
|
|||
try {
|
||||
ExpressionNode n = (ExpressionNode) inv.getUserData("validator.expression.cache");
|
||||
if (n == null) {
|
||||
n = fpe.parse(FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey()));
|
||||
n = fpe.parse(FHIRPathExpressionFixer.fixExpr(inv.getExpression(), inv.getKey(), context.getVersion()));
|
||||
inv.setUserData("validator.expression.cache", n);
|
||||
}
|
||||
fpe.check(null, sd.getKind() == StructureDefinitionKind.RESOURCE ? sd.getType() : "DomainResource", ed.getPath(), n);
|
||||
|
|
|
@ -0,0 +1,958 @@
|
|||
package org.hl7.fhir.validation.instance.type;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.hl7.fhir.r5.conformance.profile.ProfileUtilities;
|
||||
import org.hl7.fhir.r5.context.ContextUtilities;
|
||||
import org.hl7.fhir.r5.context.IWorkerContext;
|
||||
import org.hl7.fhir.r5.elementmodel.Element;
|
||||
import org.hl7.fhir.r5.model.Coding;
|
||||
import org.hl7.fhir.r5.model.ElementDefinition;
|
||||
import org.hl7.fhir.r5.model.ElementDefinition.TypeRefComponent;
|
||||
import org.hl7.fhir.r5.model.StructureDefinition;
|
||||
import org.hl7.fhir.r5.model.StructureMap;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapGroupComponent;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapGroupInputComponent;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapGroupTypeMode;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapInputMode;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapModelMode;
|
||||
import org.hl7.fhir.r5.model.StructureMap.StructureMapStructureComponent;
|
||||
import org.hl7.fhir.r5.model.TypeDetails;
|
||||
import org.hl7.fhir.r5.utils.FHIRPathEngine;
|
||||
import org.hl7.fhir.r5.utils.XVerExtensionManager;
|
||||
import org.hl7.fhir.r5.utils.structuremap.StructureMapUtilities;
|
||||
import org.hl7.fhir.utilities.CommaSeparatedStringBuilder;
|
||||
import org.hl7.fhir.utilities.Utilities;
|
||||
import org.hl7.fhir.utilities.i18n.I18nConstants;
|
||||
import org.hl7.fhir.utilities.validation.ValidationMessage;
|
||||
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueType;
|
||||
import org.hl7.fhir.utilities.validation.ValidationMessage.Source;
|
||||
import org.hl7.fhir.validation.BaseValidator;
|
||||
import org.hl7.fhir.validation.TimeTracker;
|
||||
import org.hl7.fhir.validation.instance.utils.NodeStack;
|
||||
|
||||
public class StructureMapValidator extends BaseValidator {
|
||||
|
||||
public class ElementDefinitionSource {
|
||||
private StructureDefinition sd;
|
||||
private ElementDefinition ed;
|
||||
protected ElementDefinitionSource(StructureDefinition sd, ElementDefinition ed) {
|
||||
super();
|
||||
this.sd = sd;
|
||||
this.ed = ed;
|
||||
}
|
||||
public StructureDefinition getSd() {
|
||||
return sd;
|
||||
}
|
||||
public ElementDefinition getEd() {
|
||||
return ed;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class RuleInformation {
|
||||
|
||||
int maxCount = 1;
|
||||
String defVariable;
|
||||
|
||||
public void seeCardinality(int max) {
|
||||
if (max == Integer.MAX_VALUE || maxCount == Integer.MAX_VALUE) {
|
||||
maxCount = Integer.MAX_VALUE;
|
||||
} else {
|
||||
maxCount = maxCount * max;
|
||||
}
|
||||
}
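    // Illustrative note, not part of this commit: maxCount accumulates the multiplicities of
    // nested sources, so seeCardinality(2) followed by seeCardinality(3) gives maxCount == 6,
    // any unbounded cardinality forces Integer.MAX_VALUE, and isList() below reports true
    // whenever the accumulated count exceeds 1.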
|
||||
|
||||
public boolean isList() {
|
||||
return maxCount > 1;
|
||||
}
|
||||
|
||||
public int getMaxCount() {
|
||||
return maxCount;
|
||||
}
|
||||
|
||||
public String getDefVariable() {
|
||||
return defVariable;
|
||||
}
|
||||
|
||||
public void setDefVariable(String defVariable) {
|
||||
this.defVariable = defVariable;
|
||||
}
|
||||
}
|
||||
|
||||
public class VariableDefn {
|
||||
|
||||
private String name;
|
||||
private String mode;
|
||||
private int max;
|
||||
private StructureDefinition sd;
|
||||
private ElementDefinition ed;
|
||||
private String type;
|
||||
|
||||
|
||||
protected VariableDefn(String name, String mode) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public VariableDefn setType(int max, StructureDefinition sd, ElementDefinition ed, String type) {
|
||||
this.max = max;
|
||||
this.sd = sd;
|
||||
this.ed = ed;
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public VariableDefn copy() {
|
||||
VariableDefn n = new VariableDefn(name, mode);
|
||||
n.max = max;
|
||||
n.sd = sd;
|
||||
n.ed = ed;
|
||||
n.type = type;
|
||||
return n;
|
||||
}
|
||||
|
||||
public boolean hasTypeInfo() {
|
||||
return sd != null;
|
||||
}
|
||||
|
||||
public int getMax() {
|
||||
return max;
|
||||
}
|
||||
|
||||
public StructureDefinition getSd() {
|
||||
return sd;
|
||||
}
|
||||
|
||||
public ElementDefinition getEd() {
|
||||
return ed;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getWorkingType() {
|
||||
if (type != null) {
|
||||
return type;
|
||||
}
|
||||
if (ed != null && ed.getType().size() == 1) {
|
||||
return ed.getType().get(0).getWorkingCode();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class VariableSet {
|
||||
|
||||
private List<VariableDefn> list = new ArrayList<>();
|
||||
|
||||
public boolean hasVariable(String name) {
|
||||
for (VariableDefn v : list) {
|
||||
if (name.equals(v.getName())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean hasVariable(String name, boolean source) {
|
||||
for (VariableDefn v : list) {
|
||||
if (name.equals(v.getName()) && source == ("source".equals(v.getMode()))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public VariableDefn add(String name, String mode) {
|
||||
list.removeIf(item -> item.getName().equals(name) && item.getMode().equals(mode));
|
||||
VariableDefn v = new VariableDefn(name, mode);
|
||||
list.add(v);
|
||||
return v;
|
||||
}
|
||||
//
|
||||
// public void add(VariableDefn v) {
|
||||
// list.removeIf(item -> item.getName().equals(v.getName()) && item.getMode().equals(v.getMode()));
|
||||
// list.add(v);
|
||||
// }
|
||||
//
|
||||
public VariableSet copy() {
|
||||
VariableSet set = new VariableSet();
|
||||
for (VariableDefn v : list) {
|
||||
set.list.add(v.copy());
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
public VariableDefn getVariable(String name, boolean source) {
|
||||
for (VariableDefn v : list) {
|
||||
if (name.equals(v.getName()) && source == ("source".equals(v.getMode()))) {
|
||||
return v;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public void add(String pname, VariableDefn v) {
|
||||
VariableDefn vn = v.copy();
|
||||
vn.name = pname;
|
||||
list.add(vn);
|
||||
}
|
||||
}
|
||||
|
||||
private static final boolean SOURCE = true;
|
||||
private static final boolean TARGET = false;
|
||||
|
||||
private FHIRPathEngine fpe;
|
||||
private ProfileUtilities profileUtilities;
|
||||
private ContextUtilities cu;
|
||||
private List<StructureMap> imports = new ArrayList<>();
|
||||
|
||||
public StructureMapValidator(IWorkerContext context, TimeTracker timeTracker, FHIRPathEngine fpe, XVerExtensionManager xverManager, ProfileUtilities profileUtilities, Coding jurisdiction) {
|
||||
super(context, xverManager);
|
||||
source = Source.InstanceValidator;
|
||||
this.fpe = fpe;
|
||||
this.timeTracker = timeTracker;
|
||||
this.jurisdiction = jurisdiction;
|
||||
this.profileUtilities = profileUtilities;
|
||||
this.cu = new ContextUtilities(context);
|
||||
|
||||
}
|
||||
|
||||
public boolean validateStructureMap(List<ValidationMessage> errors, Element src, NodeStack stack) {
|
||||
boolean ok = true;
|
||||
List<Element> imports = src.getChildrenByName("import");
|
||||
int cc = 0;
|
||||
for (Element import_ : imports) {
|
||||
ok = validateImport(errors, src, import_, stack.push(import_, cc, null, null)) && ok;
|
||||
cc++;
|
||||
}
|
||||
|
||||
List<Element> groups = src.getChildrenByName("group");
|
||||
// we iterate the groups repeatedly, validating them if they have stated types or found types, until nothing happens
|
||||
boolean fired = false;
|
||||
do {
|
||||
fired = false;
|
||||
cc = 0;
|
||||
for (Element group : groups) {
|
||||
if (!group.hasUserData("structuremap.validated")) {
|
||||
if (hasInputTypes(group) || group.hasUserData("structuremap.parameters")) {
|
||||
group.setUserData("structuremap.validated", true);
|
||||
fired = true;
|
||||
ok = validateGroup(errors, src, group, stack.push(group, cc, null, null)) && ok;
|
||||
}
|
||||
}
|
||||
cc++;
|
||||
}
|
||||
} while (fired);
|
||||
|
||||
cc = 0;
|
||||
for (Element group : groups) {
|
||||
if (!group.hasUserData("structuremap.validated")) {
|
||||
hint(errors, "2023-03-01", IssueType.INFORMATIONAL, group.line(), group.col(), stack.getLiteralPath(), ok, I18nConstants.SM_ORPHAN_GROUP);
|
||||
ok = validateGroup(errors, src, group, stack.push(group, cc, null, null)) && ok;
|
||||
}
|
||||
cc++;
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
private boolean hasInputTypes(Element group) {
|
||||
List<Element> inputs = group.getChildrenByName("input");
|
||||
for (Element input : inputs) {
|
||||
if (!input.hasChild("type")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean validateImport(List<ValidationMessage> errors, Element src, Element import_, NodeStack stack) {
|
||||
String url = import_.primitiveValue();
|
||||
boolean ok = false;
|
||||
StructureMap map = context.fetchResource(StructureMap.class, url);
|
||||
if (map != null) {
|
||||
imports.add(map);
|
||||
ok = true;
|
||||
} else if (url.contains("*")) {
|
||||
List<StructureMap> maps = cu.listMaps(url);
|
||||
ok = !maps.isEmpty();
|
||||
imports.addAll(maps);
|
||||
}
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, import_.line(), import_.col(), stack.getLiteralPath(), ok, I18nConstants.SM_IMPORT_NOT_FOUND, url);
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean validateGroup(List<ValidationMessage> errors, Element src, Element group, NodeStack stack) {
|
||||
String name = group.getChildValue("name");
|
||||
boolean ok = rule(errors, "2023-03-01", IssueType.INVALID, group.line(), group.col(), stack.getLiteralPath(), idIsValid(name), I18nConstants.SM_NAME_INVALID, name);
|
||||
|
||||
Element extend = group.getNamedChild("extends");
|
||||
if (extend != null) {
|
||||
StructureMapGroupComponent grp = resolveGroup(extend.primitiveValue(), src);
|
||||
if (rule(errors, "2023-03-01", IssueType.NOTSUPPORTED, extend.line(), extend.col(), stack.push(extend, -1, null, null).getLiteralPath(), grp != null, I18nConstants.SM_RULEGROUP_NOT_FOUND, extend.primitiveValue())) {
|
||||
// check inputs
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
|
||||
VariableSet variables = new VariableSet();
|
||||
VariableSet pvars = (VariableSet) group.getUserData("structuremap.parameters");
|
||||
|
||||
// first, load all the inputs
|
||||
List<Element> inputs = group.getChildrenByName("input");
|
||||
List<Element> structures = src.getChildrenByName("structure");
|
||||
int cc = 0;
|
||||
for (Element input : inputs) {
|
||||
ok = validateInput(errors, src, group, input, stack.push(input, cc, null, null), structures, variables, pvars) && ok;
|
||||
cc++;
|
||||
}
|
||||
|
||||
// now check the rules.
|
||||
List<Element> rules = group.getChildrenByName("rule");
|
||||
cc = 0;
|
||||
for (Element rule : rules) {
|
||||
ok = validateRule(errors, src, group, rule, stack.push(rule, cc, null, null), variables) && ok;
|
||||
cc++;
|
||||
}
|
||||
|
||||
return ok;
|
||||
}
|
||||
|
||||
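// Resolves a group by name: first among the groups declared in this map's source, then across the imported StructureMaps.
|
||||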
private StructureMapGroupComponent resolveGroup(String grpName, Element src) {
|
||||
if (grpName == null) {
|
||||
return null;
|
||||
}
|
||||
List<Element> groups = src.getChildrenByName("group");
|
||||
for (Element group : groups) {
|
||||
String name = group.getChildValue("name");
|
||||
if (grpName.equals(name)) {
|
||||
return makeGroupComponent(group);
|
||||
}
|
||||
}
|
||||
for (StructureMap map : imports) {
|
||||
for (StructureMapGroupComponent grp : map.getGroup()) {
|
||||
if (grpName.equals(grp.getName())) {
|
||||
return grp;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private StructureMapGroupComponent makeGroupComponent(Element group) {
|
||||
StructureMapGroupComponent grp = new StructureMapGroupComponent();
|
||||
grp.setUserData("element.source", group);
|
||||
grp.setName(group.getChildValue("name"));
|
||||
List<Element> inputs = group.getChildrenByName("input");
|
||||
for (Element input : inputs) {
|
||||
StructureMapGroupInputComponent inp = grp.addInput();
|
||||
inp.setName(input.getChildValue("name"));
|
||||
inp.setType(input.getChildValue("type"));
|
||||
try {
|
||||
inp.setMode(StructureMapInputMode.fromCode(input.getChildValue("mode")));
|
||||
} catch (Exception e) {
|
||||
// nothing; will be an error elsewhere
|
||||
}
|
||||
}
|
||||
return grp;
|
||||
}
|
||||
|
||||
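// Validates a single group input: checks name and mode, resolves its type from the declared structures, a known type definition, or the caller-supplied parameter types (pvars), and registers it as a variable.
|
||||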
private boolean validateInput(List<ValidationMessage> errors, Element src, Element group, Element input, NodeStack stack, List<Element> structures, VariableSet variables, VariableSet pvars) {
|
||||
boolean ok = false;
|
||||
String name = input.getChildValue("name");
|
||||
String mode = input.getChildValue("mode");
|
||||
String type = input.getChildValue("type");
|
||||
VariableDefn pv = null;
|
||||
if (type == null && pvars != null) {
|
||||
pv = pvars.getVariable(name, mode.equals("source"));
|
||||
if (pv != null) {
|
||||
type = pv.getWorkingType();
|
||||
}
|
||||
}
|
||||
|
||||
if (rule(errors, "2023-03-01", IssueType.NOTSUPPORTED, input.line(), input.col(), stack.getLiteralPath(), idIsValid(name), I18nConstants.SM_NAME_INVALID, name) && // the name {0} is not valid)
|
||||
rule(errors, "2023-03-01", IssueType.DUPLICATE, input.line(), input.col(), stack.getLiteralPath(), !variables.hasVariable(name), I18nConstants.SM_GROUP_INPUT_DUPLICATE, name)) { // the name {0} is not valid)
|
||||
VariableDefn v = variables.add(name, mode);
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, input.line(), input.col(), stack.getLiteralPath(), Utilities.existsInList(mode, "source", "target"), I18nConstants.SM_GROUP_INPUT_MODE_INVALID, name, mode) && // the group parameter {0} mode {1} isn't valid
|
||||
warning(errors, "2023-03-01", IssueType.NOTSUPPORTED, input.line(), input.col(), stack.getLiteralPath(), type != null, I18nConstants.SM_GROUP_INPUT_NO_TYPE, name)) { // the group parameter {0} has no type, so the paths cannot be validated
|
||||
String smode = null;
|
||||
StructureDefinition sd = null;
|
||||
ElementDefinition ed = null;
|
||||
if (pv != null) {
|
||||
sd = pv.getSd();
|
||||
ed = pv.getEd();
|
||||
} else {
|
||||
Element structure = findStructure(structures, type);
|
||||
if (structure != null) {
|
||||
smode = structure.getChildValue("mode");
|
||||
String url = structure.getChildValue("url");
|
||||
sd = context.fetchResource(StructureDefinition.class, url);
|
||||
if (sd == null) {
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, input.line(), input.col(), stack.getLiteralPath(), sd != null, I18nConstants.SM_GROUP_INPUT_TYPE_UNKNOWN_STRUCTURE, type, url);
|
||||
}
|
||||
} else if (type != null) {
|
||||
sd = context.fetchTypeDefinition(type);
|
||||
if (sd == null) {
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, input.line(), input.col(), stack.getLiteralPath(), sd != null, I18nConstants.SM_GROUP_INPUT_TYPE_UNKNOWN_TYPE, type);
|
||||
}
|
||||
} else {
|
||||
rule(errors, "2023-03-01", IssueType.NOTSUPPORTED, input.line(), input.col(), stack.getLiteralPath(), structure != null, I18nConstants.SM_GROUP_INPUT_TYPE_NOT_DECLARED, type);
|
||||
ok = false;
|
||||
}
|
||||
if (sd != null) {
|
||||
ed = sd.getSnapshot().getElementFirstRep();
|
||||
}
|
||||
}
|
||||
if (rule(errors, "2023-03-01", IssueType.NOTSUPPORTED, input.line(), input.col(), stack.getLiteralPath(), smode == null || mode.equals(smode), I18nConstants.SM_GROUP_INPUT_MODE_MISMATCH, type, mode, smode)) { // the type {0} has mode {1} which doesn't match the structure definition {2}
|
||||
v.setType(1, sd, ed, null);
|
||||
ok = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
private Element findStructure(List<Element> structures, String type) {
|
||||
for (Element structure : structures ) {
|
||||
String t = structure.getChildValue("alias");
|
||||
if (type.equals(t)) {
|
||||
return structure;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private boolean idIsValid(String name) {
|
||||
return name != null && name.matches("[a-zA-Z][a-zA-Z0-9]*");
|
||||
}
|
||||
|
||||
private boolean validateRule(List<ValidationMessage> errors, Element src, Element group, Element rule, NodeStack stack, VariableSet variables) {
|
||||
String name = rule.getChildValue("name");
|
||||
boolean ok = rule(errors, "2023-03-01", IssueType.INVALID, rule.line(), rule.col(), stack.getLiteralPath(), idIsValid(name), I18nConstants.SM_NAME_INVALID, name);
|
||||
|
||||
RuleInformation ruleInfo = new RuleInformation();
|
||||
// process the sources
|
||||
VariableSet lvars = variables.copy();
|
||||
List<Element> sources = rule.getChildrenByName("source");
|
||||
int cc = 0;
|
||||
for (Element source : sources) {
|
||||
ok = validateRuleSource(errors, src, group, rule, source, stack.push(source, cc, null, null), lvars, ruleInfo, cc) && ok;
|
||||
cc++;
|
||||
}
|
||||
// process the targets
|
||||
List<Element> targets = rule.getChildrenByName("target");
|
||||
cc = 0;
|
||||
for (Element target : targets) {
|
||||
ok = validateRuleTarget(errors, src, group, rule, target, stack.push(target, cc, null, null), lvars, ruleInfo) && ok;
|
||||
cc++;
|
||||
}
|
||||
|
||||
// process the nested rules
|
||||
List<Element> rules = rule.getChildrenByName("rule");
|
||||
cc = 0;
|
||||
for (Element child : rules) {
|
||||
ok = validateRule(errors, src, group, child, stack.push(child, cc, null, null), lvars) && ok;
|
||||
cc++;
|
||||
}
|
||||
// todo: check dependents
|
||||
List<Element> dependents = rule.getChildrenByName("dependent");
|
||||
cc = 0;
|
||||
for (Element dependent : dependents) {
|
||||
ok = validateDependent(errors, src, group, dependent, stack.push(dependent, cc, null, null), lvars) && ok;
|
||||
cc++;
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
private boolean validateRuleSource(List<ValidationMessage> errors, Element src, Element group, Element rule, Element source, NodeStack stack, VariableSet variables, RuleInformation ruleInfo, int loopCounter) {
|
||||
String context = source.getChildValue("context");
|
||||
if (loopCounter > 0) {
|
||||
ruleInfo.setDefVariable(null);
|
||||
}
|
||||
boolean ok = rule(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), idIsValid(context), I18nConstants.SM_NAME_INVALID, context) &&
|
||||
rule(errors, "2023-03-01", IssueType.UNKNOWN, source.line(), source.col(), stack.getLiteralPath(), variables.hasVariable(context, SOURCE), I18nConstants.SM_SOURCE_CONTEXT_UNKNOWN, context);
|
||||
if (ok) {
|
||||
VariableDefn v = variables.getVariable(context, SOURCE);
|
||||
if (v.hasTypeInfo()) { // if it doesn't, that's already an issue elsewhere
|
||||
// check type
|
||||
// check defaultValue
|
||||
// check element
|
||||
String element = source.getChildValue("element");
|
||||
if (element != null) {
|
||||
String path = v.getEd().getPath()+"."+element;
|
||||
String variable = source.getChildValue("variable");
|
||||
VariableDefn vn = null;
|
||||
if (hint(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), variable != null, I18nConstants.SM_RULE_SOURCE_UNASSIGNED)) {
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), idIsValid(variable), I18nConstants.SM_NAME_INVALID, variable)) {
|
||||
vn = variables.add(variable, v.getMode()); // may overwrite
|
||||
if (loopCounter == 0) {
|
||||
ruleInfo.setDefVariable(variable);
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
|
||||
List<ElementDefinitionSource> els = getElementDefinitions(v.getSd(), v.getEd(), v.getType(), element);
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), !els.isEmpty(), I18nConstants.SM_SOURCE_PATH_INVALID, context, element, path)) {
|
||||
if (warning(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), els.size() == 1, I18nConstants.SM_TARGET_PATH_MULTIPLE_MATCHES, context, element, v.getEd().getPath()+"."+element, render(els))) {
|
||||
ElementDefinitionSource el = els.get(0);
|
||||
String type = source.getChildValue("type");
|
||||
if (type != null) {
|
||||
ok = rule(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), hasType(el.getEd(), type), I18nConstants.SM_SOURCE_TYPE_INVALID, type, path, el.getEd().typeSummary()) && ok;
|
||||
}
|
||||
String min = source.getChildValue("min");
|
||||
hint(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), min == null || isMoreOrEqual(min, v.getEd().getMin()), I18nConstants.SM_RULE_SOURCE_MIN_REDUNDANT, min, v.getEd().getMin());
|
||||
|
||||
int existingMax = multiplyCardinality(v.getMax(), el.getEd().getMax());
|
||||
String max = source.getChildValue("max");
|
||||
int iMax = readMax(max, existingMax);
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, source.line(), source.col(), stack.getLiteralPath(), iMax <= existingMax, I18nConstants.SM_RULE_SOURCE_MAX_REDUNDANT, max, v.getMax());
|
||||
ruleInfo.seeCardinality(iMax);
|
||||
|
||||
|
||||
if (vn != null) {
|
||||
vn.setType(iMax, el.getSd(), el.getEd(), type); // may overwrite
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
// check condition
|
||||
// check check
|
||||
}
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
private boolean hasType(ElementDefinition ed, String type) {
|
||||
for (TypeRefComponent td : ed.getType()) {
|
||||
if (type.equals(td.getWorkingCode())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private int readMax(String max, int existingMax) {
|
||||
if (max == null || !Utilities.isInteger(max)) {
|
||||
return existingMax;
|
||||
} else {
|
||||
return Integer.parseInt(max);
|
||||
}
|
||||
}
|
||||
|
||||
private int multiplyCardinality(int max, String max2) {
|
||||
if (max == Integer.MAX_VALUE || "*".equals(max2)) {
|
||||
return Integer.MAX_VALUE;
|
||||
} else {
|
||||
return max * Integer.parseInt(max2);
|
||||
}
|
||||
}
|
||||
|
||||
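// Validates a rule target: resolves the context variable, checks list mode usage, resolves the target element, and infers or checks the type produced by the transform (create, copy, reference, evaluate).
|
||||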
private boolean validateRuleTarget(List<ValidationMessage> errors, Element src, Element group, Element rule, Element target, NodeStack stack, VariableSet variables, RuleInformation ruleInfo) {
|
||||
String context = target.getChildValue("context");
|
||||
if (context == null) {
|
||||
return true;
|
||||
}
|
||||
boolean ok = rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), idIsValid(context), I18nConstants.SM_NAME_INVALID, context) &&
|
||||
rule(errors, "2023-03-01", IssueType.UNKNOWN, target.line(), target.col(), stack.getLiteralPath(), variables.hasVariable(context, TARGET), I18nConstants.SM_TARGET_CONTEXT_UNKNOWN, context);
|
||||
if (ok) {
|
||||
VariableDefn v = variables.getVariable(context, TARGET);
|
||||
if (v.hasTypeInfo()) {
|
||||
String listMode = target.getChildValue("listMode");
|
||||
String listRuleId = target.getChildValue("listRuleId");
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), listRuleId == null || "share".equals(listMode), I18nConstants.SM_LIST_RULE_ID_ONLY_WHEN_SHARE);
|
||||
if (!ruleInfo.isList()) {
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), listMode == null, I18nConstants.SM_NO_LIST_MODE_NEEDED);
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), listRuleId == null, I18nConstants.SM_NO_LIST_RULE_ID_NEEDED);
|
||||
}
|
||||
VariableDefn vn = null;
|
||||
String variable = target.getChildValue("variable");
|
||||
if (variable != null) {
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), idIsValid(variable), I18nConstants.SM_NAME_INVALID, variable)) {
|
||||
vn = variables.add(variable, v.getMode()); // may overwrite
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
|
||||
String element = target.getChildValue("element");
|
||||
if (element != null) {
|
||||
List<ElementDefinitionSource> els = getElementDefinitions(v.getSd(), v.getEd(), v.getType(), element);
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), !els.isEmpty(), I18nConstants.SM_TARGET_PATH_INVALID, context, element, v.getEd().getPath()+"."+element)) {
|
||||
if (warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), els.size() == 1, I18nConstants.SM_TARGET_PATH_MULTIPLE_MATCHES, context, element, v.getEd().getPath()+"."+element, render(els))) {
|
||||
ElementDefinitionSource el = els.get(0);
|
||||
String transform = target.getChildValue("transform");
|
||||
List<Element> params = target.getChildren("parameter");
|
||||
if (transform == null) {
|
||||
transform = "create"; // implied
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), params.size() == 0, I18nConstants.SM_TARGET_NO_TRANSFORM_NO_CHECKED, transform);
|
||||
}
|
||||
// List<String> types = listTypes(el.getEd().getType());
|
||||
String type = null;
|
||||
if (el.getEd().getType().size() == 1) {
|
||||
type = el.getEd().getTypeFirstRep().getWorkingCode();
|
||||
} else {
|
||||
type = inferType(ruleInfo, variables, rule, transform, params);
|
||||
}
|
||||
|
||||
switch (transform) {
|
||||
case "create":
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), params.size() < 2, I18nConstants.SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE, "create", "0", "1", params.size())) {
|
||||
if (params.size() == 1) {
|
||||
type = params.get(0).getChildValue("value");
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(),type != null, I18nConstants.SM_TARGET_TRANSFORM_TYPE_UNPROCESSIBLE);
|
||||
} else {
|
||||
// maybe can guess? maybe not ... type =
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
case "copy": // logic is the same as create?
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), params.size() < 2, I18nConstants.SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE, "create", "0", "1", params.size())) {
|
||||
if (params.size() == 1) {
|
||||
type = params.get(0).getChildValue("value");
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(),type != null, I18nConstants.SM_TARGET_TRANSFORM_TYPE_UNPROCESSIBLE);
|
||||
} else {
|
||||
// maybe can guess? maybe not ... type =
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
case "reference":
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), params.size() == 1, I18nConstants.SM_TARGET_TRANSFORM_PARAM_COUNT_RANGE, "reference", "0", "1", params.size())) {
|
||||
type = "string";
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
case "evaluate":
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), params.size() == 1, I18nConstants.SM_TARGET_TRANSFORM_PARAM_COUNT_SINGLE, "evaluate", "1", params.size())) {
|
||||
String exp = params.get(0).getChildValue("value");
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, params.get(0).line(), params.get(0).col(), stack.getLiteralPath(), exp != null, I18nConstants.SM_TARGET_TRANSFORM_PARAM_UNPROCESSIBLE, "0", params.size())) {
|
||||
try {
|
||||
TypeDetails td = fpe.check(null, v.getSd().getType(), v.getEd().getPath(), fpe.parse(exp));
|
||||
if (td.getTypes().size() == 1) {
|
||||
type = td.getType();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, params.get(0).line(), params.get(0).col(), stack.getLiteralPath(), false, I18nConstants.SM_TARGET_TRANSFORM_EXPRESSION_ERROR, e.getMessage());
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), false, I18nConstants.SM_TARGET_TRANSFORM_NOT_CHECKED, transform);
|
||||
ok = false;
|
||||
}
|
||||
if (vn != null) {
|
||||
// it's just a warning: maybe this'll work out at run time?
|
||||
warning(errors, "2023-03-01", IssueType.INVALID, target.line(), target.col(), stack.getLiteralPath(), type != null, I18nConstants.SM_TARGET_TYPE_MULTIPLE_POSSIBLE, el.getEd().typeSummary());
|
||||
|
||||
vn.setType(ruleInfo.getMaxCount(), el.getSd(), el.getEd(), type); // may overwrite
|
||||
}
|
||||
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
//
|
||||
}
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
private String inferType(RuleInformation ruleInfo, VariableSet variables, Element rule, String transform, List<Element> params) {
|
||||
// under some special conditions, we can infer what the type will be:
|
||||
// * there's a nominated default variable
|
||||
// * that variable has as single type
|
||||
// * there's a create with no param
|
||||
// * there's a single dependent rule with name = StructureMapUtilities.DEF_GROUP_NAME
|
||||
// * there's a default type group for the type of the source type
|
||||
// otherwise, we can't know the target type.
|
||||
|
||||
if (ruleInfo.getDefVariable() != null && "create".equals(transform) && params.isEmpty()) {
|
||||
VariableDefn v = variables.getVariable(ruleInfo.getDefVariable(), SOURCE);
|
||||
if (v != null && (v.getEd().getType().size() == 1 || v.getType() != null)) {
|
||||
List<Element> dependents = rule.getChildrenByName("dependent");
|
||||
if (dependents.size() == 1 && StructureMapUtilities.DEF_GROUP_NAME.equals(dependents.get(0).getChildValue("name"))) {
|
||||
String type = v.getType() != null ? v.getType() : v.getEd().getTypeFirstRep().getWorkingCode();
|
||||
// now, we look for a default group.
|
||||
// todo: look in this source
|
||||
// now look through the inputs
|
||||
for (StructureMap map : imports) {
|
||||
for (StructureMapGroupComponent grp : map.getGroup()) {
|
||||
if (grp.getTypeMode() == StructureMapGroupTypeMode.TYPEANDTYPES && grp.getInput().size() == 2) {
|
||||
String grpType = getTypeForGroupInput(map, grp, grp.getInput().get(0));
|
||||
if (sameTypes(type, grpType)) {
|
||||
String tgtType = getTypeForGroupInput(map, grp, grp.getInput().get(1));
|
||||
if (tgtType != null) {
|
||||
return tgtType;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private boolean sameTypes(String type1, String type2) {
|
||||
if (type1 == null || type2 == null) {
|
||||
return false;
|
||||
}
|
||||
if (!Utilities.isAbsoluteUrl(type1)) {
|
||||
type1 = "http://hl7.org/fhir/StructureDefinition/"+type1;
|
||||
}
|
||||
if (!Utilities.isAbsoluteUrl(type2)) {
|
||||
type2 = "http://hl7.org/fhir/StructureDefinition/"+type2;
|
||||
}
|
||||
return type1.equals(type2);
|
||||
}
|
||||
|
||||
private String getTypeForGroupInput(StructureMap map, StructureMapGroupComponent grp, StructureMapGroupInputComponent input) {
|
||||
if (input == null) {
|
||||
return null;
|
||||
}
|
||||
String type = input.getType();
|
||||
StructureMapModelMode mode = input.getMode() == StructureMapInputMode.SOURCE ? StructureMapModelMode.SOURCE : StructureMapModelMode.TARGET;
|
||||
for (StructureMapStructureComponent st : map.getStructure()) {
|
||||
if (type.equals(st.getAlias()) && mode == st.getMode()) {
|
||||
return st.getUrl();
|
||||
}
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
private List<String> listTypes(List<TypeRefComponent> types) {
|
||||
List<String> res = new ArrayList<>();
|
||||
for (TypeRefComponent td : types) {
|
||||
res.add(td.getWorkingCode());
|
||||
}
|
||||
Collections.sort(res);
|
||||
return res;
|
||||
}
|
||||
|
||||
private String render(List<ElementDefinitionSource> list) {
|
||||
CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder();
|
||||
for (ElementDefinitionSource t : list) {
|
||||
b.append(t.getEd().getId());
|
||||
}
|
||||
return b.toString();
|
||||
}
|
||||
|
||||
private List<ElementDefinitionSource> getElementDefinitions(StructureDefinition sd, ElementDefinition ed, String type, String element) {
|
||||
List<ElementDefinitionSource> result = new ArrayList<>();
|
||||
List<ElementDefinition> children = profileUtilities.getChildList(sd, ed);
|
||||
if (children == null || children.isEmpty()) {
|
||||
getElementDefinitionChildrenFromTypes(result, sd, ed, type, element);
|
||||
} else {
|
||||
for (ElementDefinition t : children) {
|
||||
if (t.getNameBase().equals(element)) {
|
||||
result.add(new ElementDefinitionSource(sd, t));
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private void getElementDefinitionChildrenFromTypes(List<ElementDefinitionSource> result, StructureDefinition sd, ElementDefinition ed, String type, String element) {
|
||||
for (TypeRefComponent td : ed.getType()) {
|
||||
if (type == null || td.getWorkingCode().equals(type)) {
|
||||
StructureDefinition tsd = context.fetchTypeDefinition(td.getWorkingCode());
|
||||
if (tsd != null) {
|
||||
for (ElementDefinition t : tsd.getSnapshot().getElement()) {
|
||||
if (Utilities.charCount(t.getPath(), '.') == 1 && t.getNameBase().equals(element)) {
|
||||
result.add(new ElementDefinitionSource(tsd, t));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
System.out.println("Unable to find type "+type);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isLessOrEqual(String value, String limit) {
|
||||
if (Utilities.isInteger(value) && Utilities.isInteger(limit)) {
|
||||
int v = Integer.parseInt(value);
|
||||
int l = Integer.parseInt(limit);
|
||||
return v <= l;
|
||||
}
|
||||
return true; // no issue in this case
|
||||
}
|
||||
|
||||
private boolean isMoreOrEqual(String value, int limit) {
|
||||
if (Utilities.isInteger(value)) {
|
||||
int v = Integer.parseInt(value);
|
||||
return v >= limit;
|
||||
}
|
||||
return true; // no issue in this case
|
||||
}
|
||||
|
||||
|
||||
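// Validates a dependent invocation: the implicit default group (DEF_GROUP_NAME) is matched by the source/target types of the auto variables; named groups are resolved and their parameters checked against the group inputs.
|
||||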
private boolean validateDependent(List<ValidationMessage> errors, Element src, Element group, Element dependent, NodeStack stack, VariableSet variables) {
|
||||
boolean ok = true;
|
||||
String name = dependent.getChildValue("name");
|
||||
if (StructureMapUtilities.DEF_GROUP_NAME.equals(name)) {
|
||||
VariableDefn srcVar = variables.getVariable(StructureMapUtilities.AUTO_VAR_NAME, true);
|
||||
VariableDefn tgtVar = variables.getVariable(StructureMapUtilities.AUTO_VAR_NAME, false);
|
||||
if (srcVar != null && srcVar.hasTypeInfo() && tgtVar != null && tgtVar.hasTypeInfo()) {
|
||||
String srcType = srcVar.getWorkingType();
|
||||
String tgtType = tgtVar.getWorkingType();
|
||||
if (rule(errors, "2023-03-01", IssueType.NOTFOUND, dependent.line(), dependent.col(), stack.getLiteralPath(), srcType != null, I18nConstants.SM_SOURCE_TYPE_NOT_FOUND) &&
|
||||
rule(errors, "2023-03-01", IssueType.NOTFOUND, dependent.line(), dependent.col(), stack.getLiteralPath(), tgtType != null, I18nConstants.SM_TARGET_TYPE_NOT_FOUND)) {
|
||||
StructureMapGroupComponent grp = findDefaultGroup(src, srcType, tgtType);
|
||||
ok = rule(errors, "2023-03-01", IssueType.NOTFOUND, dependent.line(), dependent.col(), stack.getLiteralPath(), grp != null, I18nConstants.SM_MATCHING_RULEGROUP_NOT_FOUND, srcType, tgtType) && ok;
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
StructureMapGroupComponent grp = resolveGroup(name, src);
|
||||
if (rule(errors, "2023-03-01", IssueType.NOTFOUND, dependent.line(), dependent.col(), stack.getLiteralPath(), grp != null, I18nConstants.SM_RULEGROUP_NOT_FOUND, name)) {
|
||||
List<Element> params = dependent.getChildren("parameter");
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, dependent.line(), dependent.col(), stack.getLiteralPath(), params.size() == grp.getInput().size(), I18nConstants.SM_RULEGROUP_NOT_FOUND, params.size(), grp.getInput().size())) {
|
||||
VariableSet lvars = new VariableSet();
|
||||
int cc = 0;
|
||||
for (Element param : params) {
|
||||
NodeStack pstack = stack.push(param, cc, null, null);
|
||||
StructureMapGroupInputComponent input = grp.getInput().get(cc);
|
||||
String pname = input.getName();
|
||||
VariableDefn v = getParameter(errors, param, pstack, variables, input.getMode());
|
||||
if (v != null) {
|
||||
if (rule(errors, "2023-03-01", IssueType.INVALID, param.line(), param.col(), pstack.getLiteralPath(), v.mode.equals(input.getMode().toCode()), I18nConstants.SM_DEPENDENT_PARAM_MODE_MISMATCH, param.getChildValue("name"), v.mode, input.getMode().toCode()) &&
|
||||
rule(errors, "2023-03-01", IssueType.INVALID, param.line(), param.col(), pstack.getLiteralPath(), typesMatch(v, input.getType()), I18nConstants.SM_DEPENDENT_PARAM_TYPE_MISMATCH, param.getChildValue("name"), v, input.getType())) {
|
||||
lvars.add(pname, v);
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
cc++;
|
||||
}
|
||||
if (ok && grp.hasUserData("element.source")) {
|
||||
Element g = (Element) grp.getUserData("element.source");
|
||||
if (g.hasUserData("structuremap.parameters")) {
|
||||
throw new Error("bang! - this situation is not handled");
|
||||
} else {
|
||||
g.setUserData("structuremap.parameters", lvars);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
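// Finds a 'types' or 'type-and-types' group whose source/target input types match the given types, first in this map's source, then in the imported maps.
|
||||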
private StructureMapGroupComponent findDefaultGroup(Element src, String srcType, String tgtType) {
|
||||
List<Element> groups = src.getChildrenByName("group");
|
||||
for (Element group : groups) {
|
||||
if (Utilities.existsInList(group.getChildValue("typeMode"), "types", "type-and-types")) {
|
||||
List<Element> inputs = group.getChildrenByName("input");
|
||||
if (inputs.size() == 2 && "source".equals(inputs.get(0).getChildValue("mode")) && "source".equals(inputs.get(0).getChildValue("mode"))) {
|
||||
String srcT = resolveInputType(src, inputs.get(0));
|
||||
String tgtT = resolveInputType(src, inputs.get(1));
|
||||
if (sameTypes(srcT, srcType) && sameTypes(tgtT, tgtType)) {
|
||||
return makeGroupComponent(group);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (StructureMap map : imports) {
|
||||
for (StructureMapGroupComponent grp : map.getGroup()) {
|
||||
if ((grp.getTypeMode() == StructureMapGroupTypeMode.TYPES || grp.getTypeMode() == StructureMapGroupTypeMode.TYPEANDTYPES) &&
|
||||
grp.getInput().size() == 2 && grp.getInput().get(0).getMode() == StructureMapInputMode.SOURCE && grp.getInput().get(1).getMode() == StructureMapInputMode.TARGET) {
|
||||
String srcT = resolveInputType(map, grp.getInput().get(0));
|
||||
String tgtT = resolveInputType(map, grp.getInput().get(1));
|
||||
if (sameTypes(srcT, srcType) && sameTypes(tgtT, tgtType)) {
|
||||
return grp;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
private String resolveInputType(StructureMap map, StructureMapGroupInputComponent input) {
|
||||
String type = input.getType();
|
||||
if (type == null) {
|
||||
return null;
|
||||
}
|
||||
for (StructureMapStructureComponent structure : map.getStructure()) {
|
||||
if (type.equals(structure.getAlias())) {
|
||||
return structure.getUrl();
|
||||
}
|
||||
}
|
||||
StructureDefinition sd = context.fetchTypeDefinition(type);
|
||||
return sd == null ? null : sd.getUrl();
|
||||
}
|
||||
|
||||
private String resolveInputType(Element src, Element input) {
|
||||
String type = input.getChildValue("type");
|
||||
if (type == null) {
|
||||
return null;
|
||||
}
|
||||
for (Element structure : input.getChildren("structure")) {
|
||||
if (type.equals(structure.getChildValue("alias"))) {
|
||||
return structure.getChildValue("url");
|
||||
}
|
||||
}
|
||||
StructureDefinition sd = context.fetchTypeDefinition(type);
|
||||
return sd == null ? null : sd.getUrl();
|
||||
}
|
||||
|
||||
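// Note: only parameters bound to untyped group inputs are accepted here; any declared input type is currently reported as a mismatch.
|
||||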
private boolean typesMatch(VariableDefn v, String type) {
|
||||
if (type == null) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private VariableDefn getParameter(List<ValidationMessage> errors, Element param, NodeStack pstack, VariableSet variables, StructureMapInputMode mode) {
|
||||
Element v = param.getNamedChild("value");
|
||||
if (v.fhirType().equals("id")) {
|
||||
return variables.getVariable(v.primitiveValue(), mode == StructureMapInputMode.SOURCE);
|
||||
} else {
|
||||
String type = v.fhirType();
|
||||
StructureDefinition sd = context.fetchTypeDefinition(type);
|
||||
return new VariableDefn("$", "source").setType(1, sd, sd.getSnapshot().getElementFirstRep(), null);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,16 +1,22 @@
|
|||
package org.hl7.fhir.validation.instance.utils;
|
||||
|
||||
import org.hl7.fhir.utilities.VersionUtilities;
|
||||
|
||||
public class FHIRPathExpressionFixer {
|
||||
|
||||
|
||||
public static String fixExpr(String expr, String key) {
|
||||
public static String fixExpr(String expr, String key, String version) {
|
||||
// this is a hack work around for past publication of wrong FHIRPath expressions
|
||||
// R4
|
||||
// waiting for 4.0.2
|
||||
|
||||
boolean r5 = VersionUtilities.isR5Ver(version);
|
||||
// if (r5) {
|
||||
// return expr;
|
||||
// }
|
||||
|
||||
//TODO is this expression below correct? @grahamegrieve
|
||||
if ("probability is decimal implies (probability as decimal) <= 100".equals(expr)) {
|
||||
return "probability.empty() or ((probability is decimal) implies ((probability as decimal) <= 100))";
|
||||
}
|
||||
// if ("probability is decimal implies (probability as decimal) <= 100".equals(expr)) {
|
||||
// return "probability.empty() or ((probability is decimal) implies ((probability as decimal) <= 100))";
|
||||
// }
|
||||
if ("enableWhen.count() > 2 implies enableBehavior.exists()".equals(expr)) {
|
||||
return "enableWhen.count() >= 2 implies enableBehavior.exists()";
|
||||
}
|
||||
|
@ -52,10 +58,10 @@ public class FHIRPathExpressionFixer {
|
|||
}
|
||||
|
||||
// clarification in FHIRPath spec
|
||||
if ("eld-19".equals(key)) {
|
||||
if (!r5 && "eld-19".equals(key)) {
|
||||
return "path.matches('^[^\\\\s\\\\.,:;\\\\\\'\"\\\\/|?!@#$%&*()\\\\[\\\\]{}]{1,64}(\\\\.[^\\\\s\\\\.,:;\\\\\\'\"\\\\/|?!@#$%&*()\\\\[\\\\]{}]{1,64}(\\\\[x\\\\])?(\\\\:[^\\\\s\\\\.]+)?)*$')";
|
||||
}
|
||||
if ("eld-20".equals(key)) {
|
||||
if (!r5 && "eld-20".equals(key)) {
|
||||
return "path.matches('^[A-Za-z][A-Za-z0-9]*(\\\\.[a-z][A-Za-z0-9]*(\\\\[x])?)*$')";
|
||||
}
|
||||
|
||||
|
|
|
@ -79,3 +79,13 @@ v: {
|
|||
"system" : "http://unitsofmeasure.org"
|
||||
}
|
||||
-------------------------------------------------------------------------------------
|
||||
{"code" : {
|
||||
"system" : "http://unitsofmeasure.org",
|
||||
"code" : "cm"
|
||||
}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
|
||||
v: {
|
||||
"display" : "cm",
|
||||
"code" : "cm",
|
||||
"system" : "http://unitsofmeasure.org"
|
||||
}
|
||||
-------------------------------------------------------------------------------------
|
||||
|
|
22
pom.xml
22
pom.xml
|
@ -14,7 +14,7 @@
|
|||
HAPI FHIR
|
||||
-->
|
||||
<artifactId>org.hl7.fhir.core</artifactId>
|
||||
<version>5.6.99-SNAPSHOT</version>
|
||||
<version>5.6.100-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<properties>
|
||||
|
@ -24,6 +24,7 @@
|
|||
<junit_platform_launcher_version>1.8.2</junit_platform_launcher_version>
|
||||
<maven_surefire_version>3.0.0-M5</maven_surefire_version>
|
||||
<maven_clean_version>3.1.0</maven_clean_version>
|
||||
<okhttp.version>4.9.3</okhttp.version>
|
||||
<jacoco_version>0.8.8</jacoco_version>
|
||||
<info_cqframework_version>1.5.1</info_cqframework_version>
|
||||
<lombok_version>1.18.22</lombok_version>
|
||||
|
@ -36,6 +37,7 @@
|
|||
<maven.compiler.testRelease>11</maven.compiler.testRelease>
|
||||
<maven.compiler.testSource>11</maven.compiler.testSource>
|
||||
<maven.compiler.testTarget>11</maven.compiler.testTarget>
|
||||
<maven-jar-plugin.version>3.2.2</maven-jar-plugin.version>
|
||||
</properties>
|
||||
|
||||
<name>HL7 Core Artifacts</name>
|
||||
|
@ -182,9 +184,22 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>process-sources</phase>
|
||||
<goals>
|
||||
<goal>checkstyle</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
<failsOnError>true</failsOnError>
|
||||
<suppressionsLocation>${project.basedir}/checkstyle_suppressions.xml</suppressionsLocation>
|
||||
<enableRulesSummary>true</enableRulesSummary>
|
||||
<enableSeveritySummary>true</enableSeveritySummary>
|
||||
<consoleOutput>true</consoleOutput>
|
||||
<configLocation>${project.basedir}/../checkstyle.xml</configLocation>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
|
@ -228,7 +243,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<version>${maven-jar-plugin.version}</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
|
@ -415,6 +430,7 @@
|
|||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
<profiles>
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
jobs:
|
||||
- ${{ each image in parameters.images }}:
|
||||
- job:
|
||||
|
||||
dependsOn: [ 'setup' ]
|
||||
displayName: ${{image.displayName}}
|
||||
|
||||
pool:
|
||||
|
@ -9,15 +9,21 @@ jobs:
|
|||
|
||||
variables:
|
||||
currentImage: ${{image.vmImage}}
|
||||
currentName: ${{image.displayName}}
|
||||
codecov: $(CODECOV_TOKEN)
|
||||
VERSION:
|
||||
JAVA_TOOL_OPTIONS: ${{image.javaToolOptions}}
|
||||
|
||||
steps:
|
||||
# Runs 'mvn clean install'
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: maven | $(Build.BuildId)
|
||||
path: $(MAVEN_CACHE_FOLDER)
|
||||
# Runs 'mvn install'
|
||||
- task: Maven@3
|
||||
inputs:
|
||||
mavenPomFile: 'pom.xml'
|
||||
options: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
|
||||
mavenOptions: '-Xmx3072m'
|
||||
javaHomeOption: 'JDKVersion'
|
||||
jdkVersionOption: '${{image.jdkVersion}}'
|
||||
|
@ -33,7 +39,7 @@ jobs:
|
|||
javaHomeOption: 'JDKVersion'
|
||||
jdkVersionOption: '${{image.jdkVersion}}'
|
||||
jdkArchitectureOption: 'x64'
|
||||
options: '-pl org.hl7.fhir.validation.cli'
|
||||
options: '-pl org.hl7.fhir.validation.cli -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
|
||||
publishJUnitResults: false
|
||||
testResultsFiles: '**/surefire-reports/TEST-*.xml'
|
||||
goals: 'exec:exec'
|
||||
|
@ -41,13 +47,14 @@ jobs:
|
|||
# Upload test results to codecov
|
||||
- script: bash <(curl https://codecov.io/bash) -t $(codecov)
|
||||
displayName: 'codecov Bash Uploader'
|
||||
condition: eq(variables.currentImage, 'ubuntu-latest')
|
||||
condition: eq(variables.currentName, 'ubuntu-latest-java-11')
|
||||
|
||||
# Publishes the test results to build artifacts.
|
||||
- task: PublishCodeCoverageResults@1
|
||||
displayName: 'Publish JaCoCo test results'
|
||||
condition: eq(variables.currentImage, 'ubuntu-latest')
|
||||
condition: eq(variables.currentName, 'ubuntu-latest-java-11')
|
||||
inputs:
|
||||
codeCoverageTool: 'JaCoCo'
|
||||
summaryFileLocation: '$(System.DefaultWorkingDirectory)/org.hl7.fhir.report/target/site/jacoco-aggregate/jacoco.xml'
|
||||
reportDirectory: '$(System.DefaultWorkingDirectory)/org.hl7.fhir.report/target/site/jacoco-aggregate/'
|
||||
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
variables:
|
||||
MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
|
||||
|
||||
trigger: none
|
||||
|
||||
pr:
|
||||
|
@ -7,6 +10,41 @@ pr:
|
|||
# Different users have different machine setups, we run the build three times, on ubuntu, osx, and windows.
|
||||
# Azure doesn't always have the same Java versions on each system, so they are enumerated for each system independently.
|
||||
jobs:
|
||||
- job: setup
|
||||
displayName: cache-maven-dependencies
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- checkout: self
|
||||
fetchDepth: 1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: maven | $(Build.BuildId)
|
||||
path: $(MAVEN_CACHE_FOLDER)
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: mkdir -p $(MAVEN_CACHE_FOLDER); pwd; ls -al $(MAVEN_CACHE_FOLDER)
|
||||
- task: Maven@3
|
||||
inputs:
|
||||
mavenPomFile: 'pom.xml'
|
||||
options: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
|
||||
mavenOptions: '-Xmx3072m'
|
||||
javaHomeOption: 'JDKVersion'
|
||||
jdkVersionOption: '1.11'
|
||||
jdkArchitectureOption: 'x64'
|
||||
publishJUnitResults: false
|
||||
goals: 'dependency:resolve'
|
||||
- task: Maven@3
|
||||
inputs:
|
||||
mavenPomFile: 'pom.xml'
|
||||
options: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
|
||||
mavenOptions: '-Xmx3072m'
|
||||
javaHomeOption: 'JDKVersion'
|
||||
jdkVersionOption: '1.11'
|
||||
jdkArchitectureOption: 'x64'
|
||||
publishJUnitResults: false
|
||||
goals: 'dependency:resolve-plugins'
|
||||
- template: pull-request-pipeline-parameterized.yml
|
||||
parameters:
|
||||
images:
|
||||
|
|