Merge branch 'master' of https://github.com/hapifhir/org.hl7.fhir.core
commit c424bf7516
@@ -10,6 +10,7 @@ Apache-2.0
Apache 2
Apache 2.0
Apache License 2.0
Apache License version 2.0
Eclipse Public License v2.0
BSD licence
The BSD License
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -1,5 +1,7 @@
package org.hl7.fhir.convertors.conv40_50.datatypes40_50;

+import java.util.Arrays;
+
import org.hl7.fhir.convertors.context.ConversionContext40_50;
import org.hl7.fhir.convertors.conv40_50.datatypes40_50.special40_50.Extension40_50;
import org.hl7.fhir.exceptions.FHIRException;

@@ -8,14 +10,22 @@ public class BackboneElement40_50 {
  public static void copyBackboneElement(org.hl7.fhir.r4.model.BackboneElement src, org.hl7.fhir.r5.model.BackboneElement tgt, String ... extensionUrlsToIgnore) throws FHIRException {
    ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().copyElement(src, tgt, extensionUrlsToIgnore);
    for (org.hl7.fhir.r4.model.Extension e : src.getModifierExtension()) {
-     tgt.addModifierExtension(Extension40_50.convertExtension(e));
+     if (!isExemptExtension(e.getUrl(), extensionUrlsToIgnore)) {
+       tgt.addModifierExtension(Extension40_50.convertExtension(e));
+     }
    }
  }

+ public static boolean isExemptExtension(String url, String[] extensionsToIgnore) {
+   return Arrays.asList(extensionsToIgnore).contains(url);
+ }
+
  public static void copyBackboneElement(org.hl7.fhir.r5.model.BackboneElement src, org.hl7.fhir.r4.model.BackboneElement tgt, String... extensionUrlsToIgnore) throws FHIRException {
    ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().copyElement(src, tgt, extensionUrlsToIgnore);
    for (org.hl7.fhir.r5.model.Extension e : src.getModifierExtension()) {
-     tgt.addModifierExtension(Extension40_50.convertExtension(e));
+     if (!isExemptExtension(e.getUrl(), extensionUrlsToIgnore)) {
+       tgt.addModifierExtension(Extension40_50.convertExtension(e));
+     }
    }
  }
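For orientation, a minimal sketch of the new helper's contract (the extension URL below is a hypothetical example, not one defined by the library): modifier extensions whose URL appears in the varargs list passed to copyBackboneElement are skipped instead of being converted.

  String exampleUrl = "http://example.org/fhir/StructureDefinition/legacy-flag"; // hypothetical URL
  String[] ignore = { exampleUrl };
  boolean skipped = BackboneElement40_50.isExemptExtension(exampleUrl, ignore);                  // true: dropped during copy
  boolean copied = !BackboneElement40_50.isExemptExtension("http://hl7.org/fhir/other", ignore); // true: still converted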
@@ -207,6 +207,9 @@ public class ConceptMap40_50 {
    for (org.hl7.fhir.r4.model.ConceptMap.TargetElementComponent t : src.getTarget()) {
      if (t.getEquivalence() == org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence.UNMATCHED) {
        tgt.setNoMap(true);
+       if (t.hasComment()) {
+         tgt.addExtension("http://hl7.org/fhir/4.0/StructureDefinition/extension-ConceptMap.group.element.target.comment", ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().convertType(t.getCommentElement()));
+       }
      } else {
        tgt.addTarget(convertTargetElementComponent(t, tgtMap));
      }

@@ -218,13 +221,18 @@ public class ConceptMap40_50 {
    if (src == null)
      return null;
    org.hl7.fhir.r4.model.ConceptMap.SourceElementComponent tgt = new org.hl7.fhir.r4.model.ConceptMap.SourceElementComponent();
-   ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().copyBackboneElement(src, tgt);
+   ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().copyBackboneElement(src, tgt, "http://hl7.org/fhir/4.0/StructureDefinition/extension-ConceptMap.group.element.target.comment");
    if (src.hasCode())
      tgt.setCodeElement(Code40_50.convertCode(src.getCodeElement()));
    if (src.hasDisplay())
      tgt.setDisplayElement(String40_50.convertString(src.getDisplayElement()));
    if (src.hasNoMap() && src.getNoMap() == true) {
-     tgt.addTarget(new org.hl7.fhir.r4.model.ConceptMap.TargetElementComponent().setEquivalence(org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence.UNMATCHED));
+     org.hl7.fhir.r4.model.ConceptMap.TargetElementComponent t = new org.hl7.fhir.r4.model.ConceptMap.TargetElementComponent();
+     t.setEquivalence(org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence.UNMATCHED);
+     if (src.hasExtension("http://hl7.org/fhir/4.0/StructureDefinition/extension-ConceptMap.group.element.target.comment")) {
+       t.setCommentElement((org.hl7.fhir.r4.model.StringType) ConversionContext40_50.INSTANCE.getVersionConvertor_40_50().convertType(src.getExtensionByUrl("http://hl7.org/fhir/4.0/StructureDefinition/extension-ConceptMap.group.element.target.comment").getValue()));
+     }
+     tgt.addTarget(t);
    } else {
      for (org.hl7.fhir.r5.model.ConceptMap.TargetElementComponent t : src.getTarget())
        tgt.addTarget(convertTargetElementComponent(t, srcMap));
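The point of the two hunks above is a lossless round trip for unmapped codes. A sketch of the behaviour (assembled for illustration, not taken from the commit; it mirrors the new unit test further below):

  org.hl7.fhir.r4.model.ConceptMap cm = new org.hl7.fhir.r4.model.ConceptMap();
  org.hl7.fhir.r4.model.ConceptMap.SourceElementComponent e = cm.addGroup().addElement().setCode("5");
  e.addTarget()
   .setEquivalence(org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence.UNMATCHED)
   .setComment("no equivalent code");

  // R4 -> R5: the unmatched target becomes element.noMap = true, and the comment is carried in the
  // cross-version target.comment extension added above.
  org.hl7.fhir.r5.model.ConceptMap r5 = (org.hl7.fhir.r5.model.ConceptMap)
      org.hl7.fhir.convertors.factory.VersionConvertorFactory_40_50.convertResource(cm);

  // R5 -> R4: noMap plus the stashed comment are turned back into an unmatched target.
  org.hl7.fhir.r4.model.ConceptMap back = (org.hl7.fhir.r4.model.ConceptMap)
      org.hl7.fhir.convertors.factory.VersionConvertorFactory_40_50.convertResource(r5);
  // back.equalsDeep(cm) is what the new ConceptMap40_50Test asserts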
@@ -18,6 +18,7 @@ import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.formats.IParser.OutputStyle;
import org.hl7.fhir.r4.formats.JsonParser;
import org.hl7.fhir.r4.model.CapabilityStatement;
+import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity;

@@ -60,8 +61,18 @@ public class VSACImporter extends OIDBasedValueSetImporter {

    CapabilityStatement cs = fhirToolingClient.getCapabilitiesStatement();
    JsonParser json = new JsonParser();
-   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path("[tmp]", "vsac-capability-statmenet.json")), cs);
+   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path("[tmp]", "vsac-capability-statement.json")), cs);
+
+   System.out.println("CodeSystems");
+   CodeSystem css = fhirToolingClient.fetchResource(CodeSystem.class, "CDCNHSN");
+   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path(dest, "CodeSystem-CDCNHSN.json")), css);
+   css = fhirToolingClient.fetchResource(CodeSystem.class, "CDCREC");
+   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path(dest, "CodeSystem-CDCREC.json")), css);
+   css = fhirToolingClient.fetchResource(CodeSystem.class, "HSLOC");
+   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path(dest, "CodeSystem-HSLOC.json")), css);
+   css = fhirToolingClient.fetchResource(CodeSystem.class, "SOP");
+   json.setOutputStyle(OutputStyle.PRETTY).compose(ManagedFileAccess.outStream(Utilities.path(dest, "CodeSystem-SOP.json")), css);

    System.out.println("Loading");
    List<String> oids = new ArrayList<>();
    while (csv.line()) {
@@ -0,0 +1,31 @@
package org.hl7.fhir.convertors.conv40_50;

import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.hl7.fhir.convertors.factory.VersionConvertorFactory_40_50;
import org.hl7.fhir.r4.formats.IParser.OutputStyle;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

public class ConceptMap40_50Test {

  @Test
  @DisplayName("Test r5 -> r4 ConceptMap conversion.")
  public void testR5_R4() throws IOException {
    InputStream r4_input = this.getClass().getResourceAsStream("/cm_nomap.json");

    org.hl7.fhir.r4.model.ConceptMap r4_actual = (org.hl7.fhir.r4.model.ConceptMap) new org.hl7.fhir.r4.formats.JsonParser().parse(r4_input);
    org.hl7.fhir.r5.model.Resource r5_conv = VersionConvertorFactory_40_50.convertResource(r4_actual);
    org.hl7.fhir.r4.model.Resource r4_conv = VersionConvertorFactory_40_50.convertResource(r5_conv);

    System.out.println(new org.hl7.fhir.r4.formats.JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(r4_actual));
    System.out.println(new org.hl7.fhir.r4.formats.JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(r4_conv));
    assertTrue(r4_actual.equalsDeep(r4_conv), "should be the same");
  }
}
@@ -0,0 +1,30 @@
{
  "resourceType": "ConceptMap",
  "id": "appointment-status-concept-map",
  "url": "https://hl7.fi/fhir/finnish-scheduling/ConceptMap/appointment-status-concept-map",
  "title": "FHIR Appointment status codes and Ajanvaraus - Ajanvarauksen tila",
  "description": "Mapping between the Finnish logical model [*Ajanvaraus - Ajanvarauksen tila*](https://koodistopalvelu.kanta.fi/codeserver/pages/classification-view-page.xhtml?classificationKey=1943) (oid `1.2.246.537.6.881`) and FHIR Appoinment status codes, in both directions.",
  "status": "draft",
  "sourceUri": "https://koodistopalvelu.kanta.fi/codeserver/pages/classification-view-page.xhtml?classificationKey=1943",
  "targetCanonical": "http://hl7.org/fhir/appointmentstatus",
  "group": [{
    "source": "https://koodistopalvelu.kanta.fi/codeserver/pages/classification-view-page.xhtml?classificationKey=1943",
    "target": "http://hl7.org/fhir/appointmentstatus",
    "element": [{
      "code": "5",
      "display": "Siirretty",
      "target": [{
        "equivalence": "unmatched",
        "comment": "There is no status for rescheduled appointments in FHIR. This code SHOULD be mapped to cancelled, if required. Note that this code is deprecated."
      }]
    }, {
      "code" : "3",
      "display" : "Varattu",
      "target" : [{
        "code" : "booked",
        "display" : "Booked",
        "equivalence" : "equal"
      }]
    }]
  }]
}
@@ -11,6 +11,7 @@ import java.util.Map;
import org.hl7.fhir.utilities.IniFile;
import org.hl7.fhir.utilities.TextFile;
import org.hl7.fhir.utilities.Utilities;
+import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;

public class Configuration {
  public static final SimpleDateFormat DATE_FORMAT() {
@@ -12,6 +12,7 @@ import org.hl7.fhir.core.generator.engine.Definitions;
import org.hl7.fhir.r5.conformance.profile.ProfileUtilities;
import org.hl7.fhir.r5.model.StructureDefinition;
import org.hl7.fhir.utilities.Utilities;
+import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;

public class JavaExtensionsGenerator {
@@ -37,6 +37,7 @@ import org.hl7.fhir.r5.model.StructureDefinition.TypeDerivationRule;
import org.hl7.fhir.r5.model.ValueSet;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.utilities.VersionUtilities;
+import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;
import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
import org.hl7.fhir.utilities.npm.NpmPackage;
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -90,7 +90,7 @@ public class XmlParser extends ParserBase {
    factory.setNamespaceAware(true);
    if (policy == ValidationPolicy.EVERYTHING) {
      // use a slower parser that keeps location data
-     TransformerFactory transformerFactory = TransformerFactory.newInstance();
+     TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
      Transformer nullTransformer = transformerFactory.newTransformer();
      DocumentBuilder docBuilder = factory.newDocumentBuilder();
      doc = docBuilder.newDocument();
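XMLUtil.newXXEProtectedTransformerFactory() replaces the bare TransformerFactory.newInstance() calls so the location-preserving parse path cannot be abused for XML external entity (XXE) attacks. Its implementation is not part of this diff; a hardened factory along roughly these lines is what the call is assumed to return:

  import javax.xml.XMLConstants;
  import javax.xml.transform.TransformerFactory;

  static TransformerFactory newHardenedTransformerFactory() throws Exception {
    TransformerFactory factory = TransformerFactory.newInstance();
    factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
    // refuse to resolve external DTDs and stylesheets, the usual XXE vectors
    factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
    factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
    return factory;
  }

The same substitution is applied at every TransformerFactory.newInstance() site in the XmlParser variants below.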
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -1,33 +1,33 @@
package org.hl7.fhir.dstu3.elementmodel;

/*
  Copyright (c) 2011+, HL7, Inc.
  All rights reserved.

  Redistribution and use in source and binary forms, with or without modification,
  are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this
     list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice,
     this list of conditions and the following disclaimer in the documentation
     and/or other materials provided with the distribution.
   * Neither the name of HL7 nor the names of its contributors may be used to
     endorse or promote products derived from this software without specific
     prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
  INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.

 */

(The removed and re-added license header in this hunk are word-for-word identical; only formatting changes, so the text is shown once.)
@@ -109,7 +109,7 @@ public class XmlParser extends ParserBase {
    factory.setNamespaceAware(true);
    if (policy == ValidationPolicy.EVERYTHING) {
      // use a slower parser that keeps location data
-     TransformerFactory transformerFactory = TransformerFactory.newInstance();
+     TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
      Transformer nullTransformer = transformerFactory.newTransformer();
      DocumentBuilder docBuilder = factory.newDocumentBuilder();
      doc = docBuilder.newDocument();
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -108,7 +108,7 @@ public class XmlParser extends ParserBase {
    factory.setNamespaceAware(true);
    if (policy == ValidationPolicy.EVERYTHING) {
      // use a slower parser that keeps location data
-     TransformerFactory transformerFactory = TransformerFactory.newInstance();
+     TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
      Transformer nullTransformer = transformerFactory.newTransformer();
      DocumentBuilder docBuilder = factory.newDocumentBuilder();
      doc = docBuilder.newDocument();
@@ -3658,23 +3658,22 @@ public class FHIRPathEngine {

    case LowBoundary:
    case HighBoundary: {
-     checkContextContinuous(focus, exp.getFunction().toCode(), exp);
+     checkContextContinuous(focus, exp.getFunction().toCode(), exp, true);
      if (paramTypes.size() > 0) {
        checkParamTypes(exp, exp.getFunction().toCode(), paramTypes,
          new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Integer));
      }
-     if (focus.hasType("decimal")
-         && (focus.hasType("date") || focus.hasType("datetime") || focus.hasType("instant"))) {
-       return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal, TypeDetails.FP_DateTime);
-     } else if (focus.hasType("decimal")) {
-       return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal);
+     if ((focus.hasType("date") || focus.hasType("datetime") || focus.hasType("instant"))) {
+       return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal, TypeDetails.FP_DateTime);
+     } else if (focus.hasType("decimal") || focus.hasType("integer")) {
+       return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal);
      } else {
        return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_DateTime);
      }
    }
    case Precision: {
-     checkContextContinuous(focus, exp.getFunction().toCode(), exp);
+     checkContextContinuous(focus, exp.getFunction().toCode(), exp, false);
      return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Integer);
    }

    case Custom: {

@@ -3778,9 +3777,8 @@ public class FHIRPathEngine {
    }
  }

- private void checkContextContinuous(TypeDetails focus, String name, ExpressionNode expr) throws PathEngineException {
-   if (!focus.hasNoTypes() && !focus.hasType("decimal") && !focus.hasType("date") && !focus.hasType("dateTime")
-       && !focus.hasType("time") && !focus.hasType("Quantity")) {
+ private void checkContextContinuous(TypeDetails focus, String name, ExpressionNode expr, boolean allowInteger) throws PathEngineException {
+   if (!focus.hasNoTypes() && !focus.hasType("decimal") && !focus.hasType("date") && !focus.hasType("dateTime") && !focus.hasType("time") && !focus.hasType("Quantity") && !(allowInteger && focus.hasType("integer"))) {
      throw makeException(expr, I18nConstants.FHIRPATH_CONTINUOUS_ONLY, name, focus.describe());
    }
  }

@@ -4223,42 +4221,43 @@ public class FHIRPathEngine {
    if (focus.size() > 1) {
      throw makeExceptionPlural(focus.size(), expr, I18nConstants.FHIRPATH_FOCUS, "lowBoundary", focus.size());
    }
-   int precision = 0;
+   Integer precision = null;
    if (expr.getParameters().size() > 0) {
      List<Base> n1 = execute(context, focus, expr.getParameters().get(0), true);
      if (n1.size() != 1) {
-       throw makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "lowBoundary", "0", "Multiple Values",
-         "integer");
+       throw makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "lowBoundary", "0", "Multiple Values", "integer");
      }
      precision = Integer.parseInt(n1.get(0).primitiveValue());
    }

    Base base = focus.get(0);
    List<Base> result = new ArrayList<Base>();

    if (base.hasType("decimal")) {
-     result
-       .add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == 0 ? 8 : precision)));
+     if (precision == null || (precision >= 0 && precision < 17)) {
+       result.add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
+     }
+   } else if (base.hasType("integer")) {
+     if (precision == null || (precision >= 0 && precision < 17)) {
+       result.add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
+     }
    } else if (base.hasType("date")) {
-     result
-       .add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == 0 ? 10 : precision)));
+     result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == null ? 10 : precision)));
    } else if (base.hasType("dateTime")) {
-     result
-       .add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == 0 ? 17 : precision)));
+     result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == null ? 17 : precision)));
    } else if (base.hasType("time")) {
-     result.add(new TimeType(Utilities.lowBoundaryForTime(base.primitiveValue(), precision == 0 ? 9 : precision)));
+     result.add(new TimeType(Utilities.lowBoundaryForTime(base.primitiveValue(), precision == null ? 9 : precision)));
    } else if (base.hasType("Quantity")) {
      String value = getNamedValue(base, "value");
      Base v = base.copy();
-     v.setProperty("value", new DecimalType(Utilities.lowBoundaryForDecimal(value, precision == 0 ? 8 : precision)));
+     v.setProperty("value", new DecimalType(Utilities.lowBoundaryForDecimal(value, precision == null ? 8 : precision)));
      result.add(v);
    } else {
-     makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(),
-       "decimal or date");
+     makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(), "decimal or date");
    }
    return result;
  }

  private List<Base> funcHighBoundary(ExecutionContext context, List<Base> focus, ExpressionNode expr) {
    if (focus.size() == 0) {
      return makeNull();

@@ -4266,41 +4265,43 @@ public class FHIRPathEngine {
    if (focus.size() > 1) {
      throw makeExceptionPlural(focus.size(), expr, I18nConstants.FHIRPATH_FOCUS, "highBoundary", focus.size());
    }
-   int precision = 0;
+   Integer precision = null;
    if (expr.getParameters().size() > 0) {
      List<Base> n1 = execute(context, focus, expr.getParameters().get(0), true);
      if (n1.size() != 1) {
-       throw makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "lowBoundary", "0", "Multiple Values",
-         "integer");
+       throw makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "lowBoundary", "0", "Multiple Values", "integer");
      }
      precision = Integer.parseInt(n1.get(0).primitiveValue());
    }

    Base base = focus.get(0);
    List<Base> result = new ArrayList<Base>();
    if (base.hasType("decimal")) {
-     result.add(
-       new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == 0 ? 8 : precision)));
+     if (precision == null || (precision >= 0 && precision < 17)) {
+       result.add(new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
+     }
+   } else if (base.hasType("integer")) {
+     if (precision == null || (precision >= 0 && precision < 17)) {
+       result.add(new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
+     }
    } else if (base.hasType("date")) {
-     result
-       .add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == 0 ? 10 : precision)));
+     result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == null ? 10 : precision)));
    } else if (base.hasType("dateTime")) {
-     result
-       .add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == 0 ? 17 : precision)));
+     result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == null ? 17 : precision)));
    } else if (base.hasType("time")) {
-     result.add(new TimeType(Utilities.highBoundaryForTime(base.primitiveValue(), precision == 0 ? 9 : precision)));
+     result.add(new TimeType(Utilities.highBoundaryForTime(base.primitiveValue(), precision == null ? 9 : precision)));
    } else if (base.hasType("Quantity")) {
      String value = getNamedValue(base, "value");
      Base v = base.copy();
-     v.setProperty("value", new DecimalType(Utilities.highBoundaryForDecimal(value, precision == 0 ? 8 : precision)));
+     v.setProperty("value", new DecimalType(Utilities.highBoundaryForDecimal(value, precision == null ? 8 : precision)));
      result.add(v);
    } else {
-     makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(),
-       "decimal or date");
+     makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(), "decimal or date");
    }
    return result;
  }

  private List<Base> funcPrecision(ExecutionContext context, List<Base> focus, ExpressionNode expr) {
    if (focus.size() != 1) {
      throw makeExceptionPlural(focus.size(), expr, I18nConstants.FHIRPATH_FOCUS, "highBoundary", focus.size());
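Taken together, the FHIRPathEngine hunks above (1) let lowBoundary()/highBoundary() accept an integer focus, returning decimal boundaries, (2) track the optional precision argument as an Integer so that "no argument" (null) can be told apart from 0, and (3) return an empty result rather than a value when a decimal or integer boundary is requested with a precision outside 0..16. The per-type defaults used when no precision is given can be read off the ternaries above; as a sketch (not code from the commit):

  // Default precision applied by lowBoundary()/highBoundary() when no argument is passed.
  static int defaultBoundaryPrecision(String fhirType) {
    switch (fhirType) {
      case "decimal":
      case "integer":
      case "Quantity": return 8;   // decimal places
      case "date":     return 10;  // yyyy-mm-dd
      case "dateTime": return 17;  // full timestamp precision
      case "time":     return 9;   // hh:mm:ss.mmm
      default: throw new IllegalArgumentException("no boundary defined for " + fhirType);
    }
  }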
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -131,7 +131,7 @@ public class XmlParser extends ParserBase {
      stream.reset();
    }
    // use a slower parser that keeps location data
-   TransformerFactory transformerFactory = TransformerFactory.newInstance();
+   TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
    Transformer nullTransformer = transformerFactory.newTransformer();
    DocumentBuilder docBuilder = factory.newDocumentBuilder();
    doc = docBuilder.newDocument();

@@ -233,6 +233,8 @@ public class XmlParser extends ParserBase {
      return "sdtc:";
+   if (ns.equals("urn:ihe:pharm"))
+     return "pharm:";
    if (ns.equals("http://ns.electronichealth.net.au/Ci/Cda/Extensions/3.0"))
      return "ext:";
    return "?:";
  }
@@ -3660,23 +3660,22 @@ public class FHIRPathEngine {
@@ -3780,9 +3779,8 @@ public class FHIRPathEngine {
@@ -4233,42 +4231,43 @@ public class FHIRPathEngine {
@@ -4276,41 +4275,43 @@ public class FHIRPathEngine {
(These four hunks apply the same lowBoundary/highBoundary/precision changes to a second copy of FHIRPathEngine; their changed lines are identical to the four FHIRPathEngine hunks shown earlier, only the hunk offsets differ.)
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
-   <version>6.3.21-SNAPSHOT</version>
+   <version>6.3.23-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -62,6 +62,18 @@ public class StructureDefinitionHacker {
        }
      }
    }
+   if (VersionUtilities.isR4Ver(version) && "http://hl7.org/fhir/StructureDefinition/ExplanationOfBenefit".equals(sd.getUrl())) {
+     for (ElementDefinition ed : sd.getSnapshot().getElement()) {
+       if (ed.hasBinding() && "http://terminology.hl7.org/CodeSystem/processpriority".equals(ed.getBinding().getValueSet())) {
+         ed.getBinding().setValueSet("http://hl7.org/fhir/ValueSet/process-priority");
+       }
+     }
+     for (ElementDefinition ed : sd.getDifferential().getElement()) {
+       if (ed.hasBinding() && "http://terminology.hl7.org/CodeSystem/processpriority".equals(ed.getBinding().getValueSet())) {
+         ed.getBinding().setValueSet("http://hl7.org/fhir/ValueSet/process-priority");
+       }
+     }
+   }
    if (sd.getUrl().startsWith("http://hl7.org/fhir/uv/subscriptions-backport")) {
      for (ElementDefinition ed : sd.getDifferential().getElement()) {
        fixMarkdownR4BURLs(ed);
@@ -0,0 +1,227 @@
package org.hl7.fhir.r5.conformance.profile;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.hl7.fhir.exceptions.DefinitionException;
import org.hl7.fhir.r5.model.Element;
import org.hl7.fhir.r5.model.ElementDefinition;
import org.hl7.fhir.r5.model.StructureDefinition;
import org.hl7.fhir.r5.model.ElementDefinition.ElementDefinitionMappingComponent;
import org.hl7.fhir.r5.model.StructureDefinition.StructureDefinitionMappingComponent;
import org.hl7.fhir.r5.utils.ToolingExtensions;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.utilities.VersionUtilities;
import org.hl7.fhir.utilities.i18n.I18nConstants;

public class MappingAssistant {

  public enum MappingMergeModeOption {
    DUPLICATE, // if there's more than one mapping for the same URI, just keep them all
    IGNORE, // if there's more than one, keep the first
    OVERWRITE, // if there's more than one, keep the last
    APPEND, // if there's more than one, append them with ';'
  }

  private MappingMergeModeOption mappingMergeMode = MappingMergeModeOption.APPEND;
  private StructureDefinition base;
  private StructureDefinition derived;

  private List<StructureDefinitionMappingComponent> masterList = new ArrayList<StructureDefinition.StructureDefinitionMappingComponent>();
  private Map<String, String> renames = new HashMap<>();
  private String version;

  public MappingAssistant(MappingMergeModeOption mappingMergeMode, StructureDefinition base, StructureDefinition derived, String version) {
    this.mappingMergeMode = mappingMergeMode;
    this.base = base;
    this.derived = derived;
    this.version = version;

    // figure out where we're going to be:
    // mappings declared in derived get priority; we do not change them either
    for (StructureDefinitionMappingComponent m : derived.getMapping()) {
      masterList.add(m);
      if (!isSuppressed(m)) {
        m.setUserData("private-marked-as-derived", true);
      }
    }

    // now, look at the base profile. If mappings in there match one in the derived, then we use that, otherwise, we add it to the list
    for (StructureDefinitionMappingComponent m : base.getMapping()) {
      StructureDefinitionMappingComponent md = findMatchInDerived(m);
      if (md == null) {
        if (nameExists(m.getIdentity())) {
          int i = 1;
          String n = m.getIdentity() + i;
          while (nameExists(n)) {
            i++;
            n = m.getIdentity() + i;
          }
          renames.put(m.getIdentity(), n);
          masterList.add(m.copy().setName(n));
        } else {
          masterList.add(m.copy());
        }
      } else {
        if (!md.hasName() && m.hasName()) {
          md.setName(m.getName());
        }
        if (!md.hasUri() && m.hasUri()) {
          md.setUri(m.getUri());
        }
        if (!md.hasComment() && m.hasComment()) {
          md.setComment(m.getComment());
        }
        if (!m.getIdentity().equals(md.getIdentity())) {
          renames.put(m.getIdentity(), md.getIdentity());
        }
      }
    }
  }

  private boolean nameExists(String n) {
    for (StructureDefinitionMappingComponent md : masterList) {
      if (n.equals(md.getIdentity())) {
        return true;
      }
    }
    return false;
  }

  private StructureDefinitionMappingComponent findMatchInDerived(StructureDefinitionMappingComponent m) {
    for (StructureDefinitionMappingComponent md : derived.getMapping()) {
      // if the URIs match, they match, regardless of anything else
      if (md.hasUri() && m.hasUri() && md.getUri().equals(m.getUri())) {
        return md;
      }
      // if the codes match
      if (md.hasIdentity() && m.hasIdentity() && md.getIdentity().equals(m.getIdentity())) {
        // the names have to match if present
        if (!md.hasName() || !m.hasName() || md.getName().equals(m.getName())) {
          return md;
        }
      }
    }
    return null;
  }

  public void update() {

    Set<StructureDefinitionMappingComponent> usedList = new HashSet<StructureDefinition.StructureDefinitionMappingComponent>();
    for (ElementDefinition ed : derived.getSnapshot().getElement()) {
      for (ElementDefinitionMappingComponent m : ed.getMapping()) {
        StructureDefinitionMappingComponent def = findDefinition(m.getIdentity());
        if (def != null) {
          usedList.add(def);
        } else {
          // not sure what to do?
        }
      }
    }

    derived.getMapping().clear();
    for (StructureDefinitionMappingComponent t : masterList) {
      if (usedList.contains(t) || t.hasUserData("private-marked-as-derived")) {
        derived.getMapping().add(t);
      }
    }
  }

  public void merge(ElementDefinition base, ElementDefinition derived) {
    List<ElementDefinitionMappingComponent> list = new ArrayList<>();
    addMappings(list, base.getMapping(), renames);
    if (derived.hasMapping()) {
      addMappings(list, derived.getMapping(), null);
    }
    derived.setMapping(list);

    // trim anything
    for (ElementDefinitionMappingComponent m : base.getMapping()) {
      if (m.hasMap()) {
        m.setMap(m.getMap().trim());
      }
    }

  }

  private void addMappings(List<ElementDefinitionMappingComponent> destination, List<ElementDefinitionMappingComponent> source, Map<String, String> renames2) {
    for (ElementDefinitionMappingComponent s : source) {
      if (!isSuppressed(s)) {
        String name = s.getIdentity();
        if (!isSuppressed(name)) {
          if (renames2 != null && renames2.containsKey(name)) {
            name = renames2.get(name);
          }

          boolean found = false;
          for (ElementDefinitionMappingComponent d : destination) {
            if (compareMaps(name, s, d)) {
              found = true;
              d.setUserData(ProfileUtilities.UD_DERIVATION_EQUALS, true);
              break;
            }
          }
          if (!found) {
            destination.add(s.setIdentity(name));
          }
        }
      }
    }
  }

  private boolean isSuppressed(String name) {
    StructureDefinitionMappingComponent m = findDefinition(name);
    return m != null && isSuppressed(m);
  }

  private boolean isSuppressed(Element s) {
    return ToolingExtensions.readBoolExtension(s, ToolingExtensions.EXT_SUPPRESSED);
  }

  private StructureDefinitionMappingComponent findDefinition(String name) {
    for (StructureDefinitionMappingComponent t : masterList) {
      if (t.getIdentity().equals(name)) {
        return t;
      }
    }
    return null;
  }

  private boolean compareMaps(String name, ElementDefinitionMappingComponent s, ElementDefinitionMappingComponent d) {

    if (d.getIdentity().equals(name) && d.getMap().equals(s.getMap())) {
      return true;
    }
    if (VersionUtilities.isR5Plus(version)) {
      if (d.getIdentity().equals(name)) {
        switch (mappingMergeMode) {
        case APPEND:
          if (!Utilities.splitStrings(d.getMap(), "\\,").contains(s.getMap())) {
            d.setMap(d.getMap()+","+s.getMap());
          }
          return true;
        case DUPLICATE:
          return false;
        case IGNORE:
          d.setMap(s.getMap());
          return true;
        case OVERWRITE:
          return true;
        default:
          return false;
        }
      } else {
        return false;
      }
    } else {
      return false;
    }
  }

}
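A sketch of how the new class is wired in (the caller shown here is hypothetical; the constructor, merge and update signatures are the ones defined above, and ProfilePathProcessor below is changed to pass the instance through as mapHelper):

  import org.hl7.fhir.r5.model.ElementDefinition;
  import org.hl7.fhir.r5.model.StructureDefinition;

  class MappingAssistantUsageSketch {
    static void mergeMappings(StructureDefinition baseSd, StructureDefinition derivedSd,
                              ElementDefinition baseEd, ElementDefinition derivedEd) {
      MappingAssistant mappings = new MappingAssistant(
          MappingAssistant.MappingMergeModeOption.APPEND, baseSd, derivedSd, "5.0.0");
      mappings.merge(baseEd, derivedEd); // called for each base/derived element pair during snapshot generation
      mappings.update();                 // afterwards, writes the mapping declarations that were actually used back to derivedSd
    }
  }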
@@ -128,7 +128,7 @@ public class ProfilePathProcessor {
  }


- protected static void processPaths(ProfileUtilities profileUtilities, StructureDefinition base, StructureDefinition derived, String url, String webUrl, StructureDefinition.StructureDefinitionDifferentialComponent differential, StructureDefinition.StructureDefinitionSnapshotComponent baseSnapshot) {
+ protected static void processPaths(ProfileUtilities profileUtilities, StructureDefinition base, StructureDefinition derived, String url, String webUrl, StructureDefinition.StructureDefinitionDifferentialComponent differential, StructureDefinition.StructureDefinitionSnapshotComponent baseSnapshot, MappingAssistant mapHelper) {

    ProfilePathProcessorState cursors = new ProfilePathProcessorState(
      baseSnapshot,

@@ -152,16 +152,17 @@
      .withRedirector(new ArrayList<ElementRedirection>())
      .withSourceStructureDefinition(base)
      .withDerived(derived)
-     .withSlicing(new PathSlicingParams()).processPaths(cursors);
+     .withSlicing(new PathSlicingParams()).processPaths(cursors, mapHelper);

  }

  /**
   * @param cursors
+  * @param mapHelper
   * @throws DefinitionException, FHIRException
   * @throws Exception
   */
- private ElementDefinition processPaths(final ProfilePathProcessorState cursors) throws FHIRException {
+ private ElementDefinition processPaths(final ProfilePathProcessorState cursors, MappingAssistant mapHelper) throws FHIRException {
    debugProcessPathsEntry(cursors);
    ElementDefinition res = null;
    List<TypeSlice> typeList = new ArrayList<>();

@@ -177,13 +178,13 @@
      // in the simple case, source is not sliced.
      if (!currentBase.hasSlicing() || currentBasePath.equals(getSlicing().getPath()))
      {
-       ElementDefinition currentRes = processSimplePath(currentBase, currentBasePath, diffMatches, typeList, cursors);
+       ElementDefinition currentRes = processSimplePath(currentBase, currentBasePath, diffMatches, typeList, cursors, mapHelper);
        if (res == null) {
          res = currentRes;
        }
      }
      else {
-       processPathWithSlicedBase(currentBase, currentBasePath, diffMatches, typeList, cursors);
+       processPathWithSlicedBase(currentBase, currentBasePath, diffMatches, typeList, cursors, mapHelper);
      }
    }

@@ -238,26 +239,26 @@
    final String currentBasePath,
    final List<ElementDefinition> diffMatches,
    final List<TypeSlice> typeList,
-   final ProfilePathProcessorState cursors) throws FHIRException {
+   final ProfilePathProcessorState cursors, MappingAssistant mapHelper) throws FHIRException {
    ElementDefinition res = null;

    // the differential doesn't say anything about this item
    // so we just copy it in
    if (diffMatches.isEmpty())
-     processSimplePathWithEmptyDiffMatches(currentBase, currentBasePath, diffMatches, cursors);
+     processSimplePathWithEmptyDiffMatches(currentBase, currentBasePath, diffMatches, cursors, mapHelper);
    // one matching element in the differential
    else if (oneMatchingElementInDifferential(getSlicing().isDone(), currentBasePath, diffMatches))
-     res = processSimplePathWithOneMatchingElementInDifferential(currentBase, currentBasePath, diffMatches, cursors);
+     res = processSimplePathWithOneMatchingElementInDifferential(currentBase, currentBasePath, diffMatches, cursors, mapHelper);
    else if (profileUtilities.diffsConstrainTypes(diffMatches, currentBasePath, typeList))
-     processSimplePathWhereDiffsConstrainTypes(currentBasePath, diffMatches, typeList, cursors);
+     processSimplePathWhereDiffsConstrainTypes(currentBasePath, diffMatches, typeList, cursors, mapHelper);
    else
-     processSimplePathDefault(currentBase, currentBasePath, diffMatches, cursors);
+     processSimplePathDefault(currentBase, currentBasePath, diffMatches, cursors, mapHelper);


    return res;
  }

- private void processSimplePathDefault(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors) {
+ private void processSimplePathDefault(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, MappingAssistant mapHelper) {
    // ok, the differential slices the item. Let's check our pre-conditions to ensure that this is correct
    if (!profileUtilities.unbounded(currentBase) && !profileUtilities.isSlicedToOneOnly(diffMatches.get(0)))
      // you can only slice an element that doesn't repeat if the sum total of your slices is limited to 1

@@ -280,7 +281,7 @@
      .withBaseLimit(newBaseLimit)
      .withDiffLimit(newDiffLimit)
      .withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0)).withSlicing(new PathSlicingParams(true, null, null))
-     .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
+     .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
    if (e == null)
      throw new FHIRException(profileUtilities.getContext().formatMessage(I18nConstants.DID_NOT_FIND_SINGLE_SLICE_, diffMatches.get(0).getPath()));
    e.setSlicing(diffMatches.get(0).getSlicing());

@@ -322,7 +323,7 @@

    // differential - if the first one in the list has a name, we'll process it. Else we'll treat it as the base definition of the slice.
    if (!diffMatches.get(0).hasSliceName()) {
-     profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), isTrimDifferential(), getUrl(),getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0)));
+     profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), isTrimDifferential(), getUrl(),getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0)), mapHelper);
      profileUtilities.removeStatusExtensions(outcome);
      if (!outcome.hasContentReference() && !outcome.hasType() && outcome.getPath().contains(".")) {
        throw new DefinitionException(profileUtilities.getContext().formatMessage(I18nConstants.NOT_DONE_YET));

@@ -350,7 +351,7 @@
          .withContextPathSource(currentBasePath)
          .withContextPathTarget(outcome.getPath()).withSlicing(new PathSlicingParams()) /* starting again on the data type, but skip the root */
          . processPaths(new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start,
-           cursors.contextName, cursors.resultPathBase));
+           cursors.contextName, cursors.resultPathBase), mapHelper);
      }
    }
    start++;

@@ -375,7 +376,7 @@
        .withDiffLimit(newDiffLimit)
        .withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, i))
        .withSlicing(new PathSlicingParams(true, slicerElement, null))
-       .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
+       .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
      }
      // ok, done with that - next in the base list
      cursors.baseCursor = newBaseLimit + 1;

@@ -400,7 +401,7 @@
    return Base.compareDeep(s1.getDiscriminator(), s2.getDiscriminator(), false);
  }

- private void processSimplePathWhereDiffsConstrainTypes(String currentBasePath, List<ElementDefinition> diffMatches, List<TypeSlice> typeList, ProfilePathProcessorState cursors) {
+ private void processSimplePathWhereDiffsConstrainTypes(String currentBasePath, List<ElementDefinition> diffMatches, List<TypeSlice> typeList, ProfilePathProcessorState cursors, MappingAssistant mapHelper) {
    int start = 0;
    int newBaseLimit = profileUtilities.findEndOfElement(cursors.base, cursors.baseCursor);
    int newDiffCursor = getDifferential().getElement().indexOf(diffMatches.get(0));

@@ -498,7 +499,7 @@
        .withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0))
        .withSlicing(new PathSlicingParams(true, null, null))
        .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor,
-         cursors.contextName, cursors.resultPathBase));
+         cursors.contextName, cursors.resultPathBase), mapHelper);
      if (elementDefinition == null)
        throw new FHIRException(profileUtilities.getContext().formatMessage(I18nConstants.DID_NOT_FIND_TYPE_ROOT_, path));
      // now set up slicing on the e (cause it was wiped by what we called.

@@ -531,7 +532,7 @@
        .withDiffLimit(newDiffLimit)
        .withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, i))
        .withSlicing(new PathSlicingParams(true, elementDefinition, null))
-       .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
+       .processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
      if (typeList.size() > start + 1) {
        typeSliceElement.setMin(0);
      }

@@ -576,7 +577,7 @@
    cursors.diffCursor = newDiffLimit + 1;
  }

- private ElementDefinition processSimplePathWithOneMatchingElementInDifferential(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors) {
+ private ElementDefinition processSimplePathWithOneMatchingElementInDifferential(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, MappingAssistant mapHelper) {
    ElementDefinition res;
    ElementDefinition template = null;
    if (diffMatches.get(0).hasType() && "Reference".equals(diffMatches.get(0).getType().get(0).getWorkingCode()) && !profileUtilities.isValidType(diffMatches.get(0).getType().get(0), currentBase)) {

@@ -684,7 +685,7 @@
        }
      }
    }
-   profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), isTrimDifferential(), getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0)));
+   profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), isTrimDifferential(), getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0)), mapHelper);
    profileUtilities.removeStatusExtensions(outcome);
    // if (outcome.getPath().endsWith("[x]") && outcome.getType().size() == 1 && !outcome.getType().get(0).getCode().equals("*") && !diffMatches.get(0).hasSlicing()) // if the base profile allows multiple types, but the profile only allows one, rename it
    //  outcome.setPath(outcome.getPath().substring(0, outcome.getPath().length()-3)+Utilities.capitalize(outcome.getType().get(0).getCode()));

@@ -746,7 +747,7 @@
          .withContextPathSource(target.getElement().getPath())
          .withContextPathTarget(diffMatches.get(0).getPath()).withRedirector(profileUtilities.redirectorStack(getRedirector(), outcome, currentBasePath))
          .withSourceStructureDefinition(target.getSource())
-         .withSlicing(new PathSlicingParams()).processPaths(new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase));
+         .withSlicing(new PathSlicingParams()).processPaths(new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase), mapHelper);
      } else {
        final int newBaseCursor = cursors.base.getElement().indexOf(target.getElement()) + 1;
        int newBaseLimit = newBaseCursor;

@@ -761,7 +762,7 @@
          .withContextPathTarget(diffMatches.get(0).getPath())
          .withRedirector(profileUtilities.redirectorStack(getRedirector(), outcome, currentBasePath))
          .withSlicing(new PathSlicingParams()).processPaths(
-           new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase));
+           new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase), mapHelper);
      }
    } else {
      StructureDefinition dt = outcome.getType().size() == 1 ? profileUtilities.getProfileForDataType(outcome.getType().get(0), getWebUrl(), getDerived()) : profileUtilities.getProfileForDataType("Element");

@@ -778,7 +779,7 @@
        .withContextPathSource(diffMatches.get(0).getPath()).withContextPathTarget(outcome.getPath()).withRedirector(new ArrayList<ElementRedirection>())
        .withSlicing(new PathSlicingParams()). /* starting again on the data type, but skip the root */
        processPaths(new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start,
-         cursors.contextName, cursors.resultPathBase));
+         cursors.contextName, cursors.resultPathBase), mapHelper);
      }
    }
  }

@@ -864,7 +865,7 @@
  }


- private void processSimplePathWithEmptyDiffMatches(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors) {
+ private void processSimplePathWithEmptyDiffMatches(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, MappingAssistant mapHelper) {
|
||||
ElementDefinition outcome = profileUtilities.updateURLs(getUrl(), getWebUrl(), currentBase.copy());
|
||||
outcome.setPath(profileUtilities.fixedPathDest(getContextPathTarget(), outcome.getPath(), getRedirector(), getContextPathSource()));
|
||||
profileUtilities.updateFromBase(outcome, currentBase, getSourceStructureDefinition().getUrl());
|
||||
|
@ -884,7 +885,7 @@ public class ProfilePathProcessor {
|
|||
// did we implicitly step into a new type?
|
||||
if (baseHasChildren(cursors.base, currentBase)) { // not a new type here
|
||||
|
||||
this.incrementDebugIndent().withSlicing(new PathSlicingParams()). processPaths( new ProfilePathProcessorState(cursors.base, cursors.baseCursor + 1, cursors.diffCursor, cursors.contextName, cursors.resultPathBase));
|
||||
this.incrementDebugIndent().withSlicing(new PathSlicingParams()). processPaths( new ProfilePathProcessorState(cursors.base, cursors.baseCursor + 1, cursors.diffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
cursors.baseCursor = indexOfFirstNonChild(cursors.base, currentBase, cursors.baseCursor + 1, getBaseLimit());
|
||||
}
|
||||
else {
|
||||
|
@ -933,7 +934,7 @@ public class ProfilePathProcessor {
|
|||
.withRedirector(profileUtilities.redirectorStack(getRedirector(), outcome, currentBasePath))
|
||||
.withSourceStructureDefinition(tgt.getSource())
|
||||
.withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, newBaseCursor, start - 1, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
} else {
|
||||
int newBaseCursor = cursors.base.getElement().indexOf(tgt.getElement()) + 1;
|
||||
int newBaseLimit = newBaseCursor;
|
||||
|
@ -948,7 +949,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathSource(tgt.getElement().getPath())
|
||||
.withContextPathTarget(outcome.getPath())
|
||||
.withRedirector(profileUtilities.redirectorStack(getRedirector(), outcome, currentBasePath)).withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, newBaseCursor, start, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, newBaseCursor, start, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
} else {
|
||||
StructureDefinition dt = outcome.getType().size() > 1 ? profileUtilities.getContext().fetchTypeDefinition("Element") : profileUtilities.getProfileForDataType(outcome.getType().get(0), getWebUrl(), getDerived());
|
||||
|
@ -967,7 +968,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathTarget(outcome.getPath())
|
||||
.withSlicing(new PathSlicingParams()).processPaths( /* starting again on the data type, but skip the root */
|
||||
new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
} else {
|
||||
|
||||
this
|
||||
|
@ -979,7 +980,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathTarget( outcome.getPath())
|
||||
.withRedirector(profileUtilities.redirectorStack(getRedirector(), currentBase, currentBasePath)).withSlicing(new PathSlicingParams()).processPaths( /* starting again on the data type, but skip the root */
|
||||
new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -991,7 +992,7 @@ public class ProfilePathProcessor {
|
|||
ElementDefinition currentBase,
|
||||
String currentBasePath,
|
||||
List<ElementDefinition> diffMatches, List<TypeSlice> typeList,
|
||||
final ProfilePathProcessorState cursors
|
||||
final ProfilePathProcessorState cursors, MappingAssistant mapHelper
|
||||
) {
|
||||
// the item is already sliced in the base profile.
|
||||
// here's the rules
|
||||
|
@ -1004,19 +1005,19 @@ public class ProfilePathProcessor {
|
|||
String path = currentBase.getPath();
|
||||
|
||||
if (diffMatches.isEmpty()) {
|
||||
processPathWithSlicedBaseAndEmptyDiffMatches(currentBase, currentBasePath, diffMatches, cursors, path);
|
||||
processPathWithSlicedBaseAndEmptyDiffMatches(currentBase, currentBasePath, diffMatches, cursors, path, mapHelper);
|
||||
}
|
||||
else if (profileUtilities.diffsConstrainTypes(diffMatches, currentBasePath, typeList))
|
||||
{
|
||||
processPathWithSlicedBaseWhereDiffsConstrainTypes(currentBasePath, diffMatches, typeList, cursors);
|
||||
processPathWithSlicedBaseWhereDiffsConstrainTypes(currentBasePath, diffMatches, typeList, cursors, mapHelper);
|
||||
}
|
||||
else
|
||||
{
|
||||
processPathWithSlicedBaseDefault(currentBase, currentBasePath, diffMatches, cursors, path);
|
||||
processPathWithSlicedBaseDefault(currentBase, currentBasePath, diffMatches, cursors, path, mapHelper);
|
||||
}
|
||||
}
|
||||
|
||||
private void processPathWithSlicedBaseDefault(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, String path) {
|
||||
private void processPathWithSlicedBaseDefault(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, String path, MappingAssistant mapHelper) {
|
||||
// first - check that the slicing is ok
|
||||
boolean closed = currentBase.getSlicing().getRules() == ElementDefinition.SlicingRules.CLOSED;
|
||||
int diffpos = 0;
|
||||
|
@ -1037,7 +1038,7 @@ public class ProfilePathProcessor {
|
|||
profileUtilities.updateFromBase(outcome, currentBase, getSourceStructureDefinition().getUrl());
|
||||
if (diffMatches.get(0).hasSlicing() || !diffMatches.get(0).hasSliceName()) {
|
||||
profileUtilities.updateFromSlicing(outcome.getSlicing(), diffMatches.get(0).getSlicing());
|
||||
profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), closed, getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0))); // if there's no slice, we don't want to update the unsliced description
|
||||
profileUtilities.updateFromDefinition(outcome, diffMatches.get(0), getProfileName(), closed, getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffMatches.get(0)), mapHelper); // if there's no slice, we don't want to update the unsliced description
|
||||
profileUtilities.removeStatusExtensions(outcome);
|
||||
} else if (!diffMatches.get(0).hasSliceName()) {
|
||||
diffMatches.get(0).setUserData(profileUtilities.UD_GENERATED_IN_SNAPSHOT, outcome); // because of updateFromDefinition isn't called
|
||||
|
@ -1076,7 +1077,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathSource(currentBasePath).withContextPathTarget(outcome.getPath())
|
||||
.withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(dt.getSnapshot(), 1, newDiffCursor,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
} else {
|
||||
|
||||
this
|
||||
|
@ -1086,7 +1087,7 @@ public class ProfilePathProcessor {
|
|||
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0))
|
||||
.withRedirector(null).withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor + 1, newDiffCursor,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
// throw new Error("Not done yet");
|
||||
// } else if (currentBase.getType().get(0).getCode().equals("BackboneElement") && diffMatches.size() > 0 && diffMatches.get(0).hasSliceName()) {
|
||||
|
@ -1127,7 +1128,7 @@ public class ProfilePathProcessor {
|
|||
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, diffpos))
|
||||
.withTrimDifferential(closed)
|
||||
.withSlicing(new PathSlicingParams(true, null, null)).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
// ok, done with that - now set the cursors for if this is the end
|
||||
cursors.baseCursor = newBaseLimit;
|
||||
cursors.diffCursor = newDiffLimit + 1;
|
||||
|
@ -1175,7 +1176,7 @@ public class ProfilePathProcessor {
|
|||
throw new DefinitionException(profileUtilities.getContext().formatMessage(I18nConstants.ADDING_WRONG_PATH));
|
||||
debugCheck(outcome);
|
||||
getResult().getElement().add(outcome);
|
||||
profileUtilities.updateFromDefinition(outcome, diffItem, getProfileName(), isTrimDifferential(), getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffItem));
|
||||
profileUtilities.updateFromDefinition(outcome, diffItem, getProfileName(), isTrimDifferential(), getUrl(), getSourceStructureDefinition(), getDerived(), diffPath(diffItem), mapHelper);
|
||||
profileUtilities.removeStatusExtensions(outcome);
|
||||
// --- LM Added this
|
||||
cursors.diffCursor = getDifferential().getElement().indexOf(diffItem) + 1;
|
||||
|
@ -1204,7 +1205,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathTarget(cursors.base.getElement().get(0).getPath())
|
||||
.withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, baseStart, start - 1,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
} else {
|
||||
StructureDefinition dt = profileUtilities.getProfileForDataType(outcome.getType().get(0), getWebUrl(), getDerived());
|
||||
// if (t.getCode().equals("Extension") && t.hasProfile() && !t.getProfile().contains(":")) {
|
||||
|
@ -1225,7 +1226,7 @@ public class ProfilePathProcessor {
|
|||
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0))
|
||||
.withContextPathSource(diffMatches.get(0).getPath()).withContextPathTarget(outcome.getPath()).withSlicing(new PathSlicingParams()).processPaths( /* starting again on the data type, but skip the root */
|
||||
new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start - 1,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1243,7 +1244,7 @@ public class ProfilePathProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
private void processPathWithSlicedBaseWhereDiffsConstrainTypes(String currentBasePath, List<ElementDefinition> diffMatches, List<TypeSlice> typeList, ProfilePathProcessorState cursors) {
|
||||
private void processPathWithSlicedBaseWhereDiffsConstrainTypes(String currentBasePath, List<ElementDefinition> diffMatches, List<TypeSlice> typeList, ProfilePathProcessorState cursors, MappingAssistant mapHelper) {
|
||||
int start = 0;
|
||||
int newBaseLimit = profileUtilities.findEndOfElement(cursors.base, cursors.baseCursor);
|
||||
int newDiffCursor = getDifferential().getElement().indexOf(diffMatches.get(0));
|
||||
|
@ -1329,7 +1330,7 @@ public class ProfilePathProcessor {
|
|||
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches,0))
|
||||
.withSlicing(new PathSlicingParams(true, null, currentBasePath)).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
if (e == null)
|
||||
throw new FHIRException(profileUtilities.getContext().formatMessage(I18nConstants.DID_NOT_FIND_TYPE_ROOT_, diffMatches.get(0).getPath()));
|
||||
// now set up slicing on the e (cause it was wiped by what we called.
|
||||
|
@ -1369,7 +1370,7 @@ public class ProfilePathProcessor {
|
|||
.withDiffLimit(newDiffLimit)
|
||||
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, i))
|
||||
.withSlicing(new PathSlicingParams(true, e, currentBasePath)).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, sStart, newDiffCursor, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, sStart, newDiffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
if (elementToRemove != null) {
|
||||
getDifferential().getElement().remove(elementToRemove);
|
||||
|
@ -1395,7 +1396,7 @@ public class ProfilePathProcessor {
|
|||
.withBaseLimit(bs.getEnd())
|
||||
.withDiffLimit(0)
|
||||
.withProfileName(getProfileName() + profileUtilities.tail(bs.getDefn().getPath())).withSlicing(new PathSlicingParams(true, e, currentBasePath)).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, bs.getStart(), 0, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, bs.getStart(), 0, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -1405,7 +1406,7 @@ public class ProfilePathProcessor {
|
|||
//throw new Error("not done yet - slicing / types @ "+cpath);
|
||||
}
|
||||
|
||||
private void processPathWithSlicedBaseAndEmptyDiffMatches(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, String path) {
|
||||
private void processPathWithSlicedBaseAndEmptyDiffMatches(ElementDefinition currentBase, String currentBasePath, List<ElementDefinition> diffMatches, ProfilePathProcessorState cursors, String path, MappingAssistant mapHelper) {
|
||||
if (profileUtilities.hasInnerDiffMatches(getDifferential(), path, cursors.diffCursor, getDiffLimit(), cursors.base.getElement(), true)) {
|
||||
// so we just copy it in
|
||||
ElementDefinition outcome = profileUtilities.updateURLs(getUrl(), getWebUrl(), currentBase.copy());
|
||||
|
@ -1425,7 +1426,7 @@ public class ProfilePathProcessor {
|
|||
this
|
||||
.incrementDebugIndent()
|
||||
.withSlicing(new PathSlicingParams()).processPaths(
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor + 1, cursors.diffCursor, cursors.contextName, cursors.resultPathBase));
|
||||
new ProfilePathProcessorState(cursors.base, cursors.baseCursor + 1, cursors.diffCursor, cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
cursors.baseCursor = indexOfFirstNonChild(cursors.base, currentBase, cursors.baseCursor, getBaseLimit());
|
||||
} else {
|
||||
StructureDefinition dt = profileUtilities.getTypeForElement(getDifferential(), cursors.diffCursor, getProfileName(), diffMatches, outcome, getWebUrl(), getDerived());
|
||||
|
@ -1447,7 +1448,7 @@ public class ProfilePathProcessor {
|
|||
.withContextPathSource(currentBasePath)
|
||||
.withContextPathTarget(outcome.getPath()).withSlicing(new PathSlicingParams()).processPaths( /* starting again on the data type, but skip the root */
|
||||
new ProfilePathProcessorState(dt.getSnapshot(), 1 /* starting again on the data type, but skip the root */, start,
|
||||
cursors.contextName, cursors.resultPathBase));
|
||||
cursors.contextName, cursors.resultPathBase), mapHelper);
|
||||
}
|
||||
}
|
||||
cursors.baseCursor++;
|
||||
|
|
|
@ -48,6 +48,7 @@ import org.hl7.fhir.exceptions.DefinitionException;
|
|||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.exceptions.FHIRFormatError;
|
||||
import org.hl7.fhir.r5.conformance.ElementRedirection;
|
||||
import org.hl7.fhir.r5.conformance.profile.MappingAssistant.MappingMergeModeOption;
|
||||
import org.hl7.fhir.r5.conformance.profile.ProfileUtilities.AllowUnknownProfile;
|
||||
import org.hl7.fhir.r5.conformance.profile.ProfileUtilities.ElementDefinitionCounter;
|
||||
import org.hl7.fhir.r5.context.IWorkerContext;
|
||||
|
@ -205,13 +206,6 @@ public class ProfileUtilities {
|
|||
|
||||
}
|
||||
|
||||
public enum MappingMergeModeOption {
|
||||
DUPLICATE, // if there's more than one mapping for the same URI, just keep them all
|
||||
IGNORE, // if there's more than one, keep the first
|
||||
OVERWRITE, // if there's opre than, keep the last
|
||||
APPEND, // if there's more than one, append them with ';'
|
||||
}
|
||||
|
||||
public enum AllowUnknownProfile {
|
||||
NONE, // exception if there's any unknown profiles (the default)
|
||||
NON_EXTNEIONS, // don't raise an exception except on Extension (because more is going on there
|
||||
|
@ -640,26 +634,6 @@ public class ProfileUtilities {
|
|||
}
|
||||
}
|
||||
|
||||
private void updateMaps(StructureDefinition base, StructureDefinition derived) throws DefinitionException {
|
||||
if (base == null)
|
||||
throw new DefinitionException(context.formatMessage(I18nConstants.NO_BASE_PROFILE_PROVIDED));
|
||||
if (derived == null)
|
||||
throw new DefinitionException(context.formatMessage(I18nConstants.NO_DERIVED_STRUCTURE_PROVIDED));
|
||||
|
||||
for (StructureDefinitionMappingComponent baseMap : base.getMapping()) {
|
||||
boolean found = false;
|
||||
for (StructureDefinitionMappingComponent derivedMap : derived.getMapping()) {
|
||||
if (derivedMap.getUri() != null && derivedMap.getUri().equals(baseMap.getUri())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
derived.getMapping().add(baseMap);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a base (snapshot) profile structure, and a differential profile, generate a new snapshot profile
|
||||
*
|
||||
|
@ -752,7 +726,9 @@ public class ProfileUtilities {
|
|||
// debug = true;
|
||||
// }
|
||||
|
||||
ProfilePathProcessor.processPaths(this, base, derived, url, webUrl, diff, baseSnapshot);
|
||||
MappingAssistant mappingDetails = new MappingAssistant(mappingMergeMode, base, derived, context.getVersion());
|
||||
|
||||
ProfilePathProcessor.processPaths(this, base, derived, url, webUrl, diff, baseSnapshot, mappingDetails);
|
||||
|
||||
checkGroupConstraints(derived);
|
||||
if (derived.getDerivation() == TypeDerivationRule.SPECIALIZATION) {
|
||||
|
@ -761,7 +737,7 @@ public class ProfileUtilities {
|
|||
if (!e.hasUserData(UD_GENERATED_IN_SNAPSHOT) && e.getPath().contains(".")) {
|
||||
ElementDefinition existing = getElementInCurrentContext(e.getPath(), derived.getSnapshot().getElement());
|
||||
if (existing != null) {
|
||||
updateFromDefinition(existing, e, profileName, false, url, base, derived, "StructureDefinition.differential.element["+i+"]");
|
||||
updateFromDefinition(existing, e, profileName, false, url, base, derived, "StructureDefinition.differential.element["+i+"]", mappingDetails);
|
||||
} else {
|
||||
ElementDefinition outcome = updateURLs(url, webUrl, e.copy());
|
||||
e.setUserData(UD_GENERATED_IN_SNAPSHOT, outcome);
|
||||
|
@ -781,7 +757,7 @@ public class ProfileUtilities {
|
|||
|
||||
if (derived.getKind() != StructureDefinitionKind.LOGICAL && !derived.getSnapshot().getElementFirstRep().getType().isEmpty())
|
||||
throw new Error(context.formatMessage(I18nConstants.TYPE_ON_FIRST_SNAPSHOT_ELEMENT_FOR__IN__FROM_, derived.getSnapshot().getElementFirstRep().getPath(), derived.getUrl(), base.getUrl()));
|
||||
updateMaps(base, derived);
|
||||
mappingDetails.update();
|
||||
|
||||
setIds(derived, false);
|
||||
if (debug) {
|
||||
|
@ -2374,7 +2350,7 @@ public class ProfileUtilities {
|
|||
}
|
||||
|
||||
|
||||
protected void updateFromDefinition(ElementDefinition dest, ElementDefinition source, String pn, boolean trimDifferential, String purl, StructureDefinition srcSD, StructureDefinition derivedSrc, String path) throws DefinitionException, FHIRException {
|
||||
protected void updateFromDefinition(ElementDefinition dest, ElementDefinition source, String pn, boolean trimDifferential, String purl, StructureDefinition srcSD, StructureDefinition derivedSrc, String path, MappingAssistant mappings) throws DefinitionException, FHIRException {
|
||||
source.setUserData(UD_GENERATED_IN_SNAPSHOT, dest);
|
||||
// we start with a clone of the base profile ('dest') and we copy from the profile ('source')
|
||||
// over the top for anything the source has
|
||||
|
@ -2845,18 +2821,7 @@ public class ProfileUtilities {
|
|||
t.setUserData(UD_DERIVATION_EQUALS, true);
|
||||
}
|
||||
|
||||
List<ElementDefinitionMappingComponent> list = new ArrayList<>();
|
||||
list.addAll(base.getMapping());
|
||||
base.getMapping().clear();
|
||||
addMappings(base.getMapping(), list);
|
||||
if (derived.hasMapping()) {
|
||||
addMappings(base.getMapping(), derived.getMapping());
|
||||
}
|
||||
for (ElementDefinitionMappingComponent m : base.getMapping()) {
|
||||
if (m.hasMap()) {
|
||||
m.setMap(m.getMap().trim());
|
||||
}
|
||||
}
|
||||
mappings.merge(derived, base); // note reversal of names to be correct in .merge()
|
||||
|
||||
// todo: constraints are cumulative. there is no replacing
|
||||
for (ElementDefinitionConstraintComponent s : base.getConstraint()) {
|
||||
|
@ -2960,52 +2925,6 @@ public class ProfileUtilities {
|
|||
tgt.getExtension().addAll(src.getExtension());
|
||||
}
|
||||
|
||||
private void addMappings(List<ElementDefinitionMappingComponent> destination, List<ElementDefinitionMappingComponent> source) {
|
||||
for (ElementDefinitionMappingComponent s : source) {
|
||||
boolean found = false;
|
||||
for (ElementDefinitionMappingComponent d : destination) {
|
||||
if (compareMaps(s, d)) {
|
||||
found = true;
|
||||
d.setUserData(UD_DERIVATION_EQUALS, true);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
destination.add(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean compareMaps(ElementDefinitionMappingComponent s, ElementDefinitionMappingComponent d) {
|
||||
if (d.getIdentity().equals(s.getIdentity()) && d.getMap().equals(s.getMap())) {
|
||||
return true;
|
||||
}
|
||||
if (VersionUtilities.isR5Plus(context.getVersion())) {
|
||||
if (d.getIdentity().equals(s.getIdentity())) {
|
||||
switch (mappingMergeMode) {
|
||||
case APPEND:
|
||||
if (!Utilities.splitStrings(d.getMap(), "\\,").contains(s.getMap())) {
|
||||
d.setMap(d.getMap()+","+s.getMap());
|
||||
}
|
||||
return true;
|
||||
case DUPLICATE:
|
||||
return false;
|
||||
case IGNORE:
|
||||
d.setMap(s.getMap());
|
||||
return true;
|
||||
case OVERWRITE:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkTypeDerivation(String purl, StructureDefinition srcSD, ElementDefinition base, ElementDefinition derived, TypeRefComponent ts, String path) {
|
||||
boolean ok = false;
|
||||
CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder();
|
||||
|
|
|
@ -1108,6 +1108,21 @@ public class Element extends Base implements NamedItem {
|
|||
return null;
|
||||
}
|
||||
|
||||
public List<Element> getExtensions(String url) {
|
||||
List<Element> list = new ArrayList<>();
|
||||
if (children != null) {
|
||||
for (Element child : children) {
|
||||
if (extensionList.contains(child.getName())) {
|
||||
String u = child.getChildValue("url");
|
||||
if (url.equals(u)) {
|
||||
list.add(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
public Base getExtensionValue(String url) {
|
||||
if (children != null) {
|
||||
for (Element child : children) {
|
||||
|
|
|
@ -144,7 +144,7 @@ public class XmlParser extends ParserBase {
|
|||
stream.reset();
|
||||
|
||||
// use a slower parser that keeps location data
|
||||
TransformerFactory transformerFactory = TransformerFactory.newInstance();
|
||||
TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
|
||||
Transformer nullTransformer = transformerFactory.newTransformer();
|
||||
DocumentBuilder docBuilder = factory.newDocumentBuilder();
|
||||
doc = docBuilder.newDocument();
|
||||
|
@ -255,6 +255,8 @@ public class XmlParser extends ParserBase {
|
|||
return "sdtc:";
|
||||
if (ns.equals("urn:ihe:pharm"))
|
||||
return "pharm:";
|
||||
if (ns.equals("http://ns.electronichealth.net.au/Ci/Cda/Extensions/3.0"))
|
||||
return "ext:";
|
||||
return "?:";
|
||||
}
|
||||
|
||||
|
|
|
@ -3735,20 +3735,20 @@ public class FHIRPathEngine {
|
|||
|
||||
case LowBoundary:
|
||||
case HighBoundary: {
|
||||
checkContextContinuous(focus, exp.getFunction().toCode(), exp);
|
||||
checkContextContinuous(focus, exp.getFunction().toCode(), exp, true);
|
||||
if (paramTypes.size() > 0) {
|
||||
checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Integer));
|
||||
}
|
||||
if (focus.hasType("decimal") && (focus.hasType("date") || focus.hasType("datetime") || focus.hasType("instant"))) {
|
||||
if ((focus.hasType("date") || focus.hasType("datetime") || focus.hasType("instant"))) {
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal, TypeDetails.FP_DateTime);
|
||||
} else if (focus.hasType("decimal")) {
|
||||
} else if (focus.hasType("decimal") || focus.hasType("integer")) {
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Decimal);
|
||||
} else {
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_DateTime);
|
||||
}
|
||||
}
|
||||
case Precision: {
|
||||
checkContextContinuous(focus, exp.getFunction().toCode(), exp);
|
||||
checkContextContinuous(focus, exp.getFunction().toCode(), exp, false);
|
||||
return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Integer);
|
||||
}
|
||||
case hasTemplateIdOf: {
|
||||
|
@ -3897,8 +3897,8 @@ public class FHIRPathEngine {
|
|||
}
|
||||
}
|
||||
|
||||
private void checkContextContinuous(TypeDetails focus, String name, ExpressionNode expr) throws PathEngineException {
|
||||
if (!focus.hasNoTypes() && !focus.hasType("decimal") && !focus.hasType("date") && !focus.hasType("dateTime") && !focus.hasType("time") && !focus.hasType("Quantity")) {
|
||||
private void checkContextContinuous(TypeDetails focus, String name, ExpressionNode expr, boolean allowInteger) throws PathEngineException {
|
||||
if (!focus.hasNoTypes() && !focus.hasType("decimal") && !focus.hasType("date") && !focus.hasType("dateTime") && !focus.hasType("time") && !focus.hasType("Quantity") && !(allowInteger && focus.hasType("integer"))) {
|
||||
throw makeException(expr, I18nConstants.FHIRPATH_CONTINUOUS_ONLY, name, focus.describe());
|
||||
}
|
||||
}
|
||||
|
@ -4295,7 +4295,7 @@ public class FHIRPathEngine {
|
|||
if (focus.size() > 1) {
|
||||
throw makeExceptionPlural(focus.size(), expr, I18nConstants.FHIRPATH_FOCUS, "lowBoundary", focus.size());
|
||||
}
|
||||
int precision = 0;
|
||||
Integer precision = null;
|
||||
if (expr.getParameters().size() > 0) {
|
||||
List<Base> n1 = execute(context, focus, expr.getParameters().get(0), true);
|
||||
if (n1.size() != 1) {
|
||||
|
@ -4308,17 +4308,23 @@ public class FHIRPathEngine {
|
|||
List<Base> result = new ArrayList<Base>();
|
||||
|
||||
if (base.hasType("decimal")) {
|
||||
result.add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == 0 ? 8 : precision)));
|
||||
if (precision == null || (precision >= 0 && precision < 17)) {
|
||||
result.add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
|
||||
}
|
||||
} else if (base.hasType("integer")) {
|
||||
if (precision == null || (precision >= 0 && precision < 17)) {
|
||||
result.add(new DecimalType(Utilities.lowBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
|
||||
}
|
||||
} else if (base.hasType("date")) {
|
||||
result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == 0 ? 10 : precision)));
|
||||
result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == null ? 10 : precision)));
|
||||
} else if (base.hasType("dateTime")) {
|
||||
result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == 0 ? 17 : precision)));
|
||||
result.add(new DateTimeType(Utilities.lowBoundaryForDate(base.primitiveValue(), precision == null ? 17 : precision)));
|
||||
} else if (base.hasType("time")) {
|
||||
result.add(new TimeType(Utilities.lowBoundaryForTime(base.primitiveValue(), precision == 0 ? 9 : precision)));
|
||||
result.add(new TimeType(Utilities.lowBoundaryForTime(base.primitiveValue(), precision == null ? 9 : precision)));
|
||||
} else if (base.hasType("Quantity")) {
|
||||
String value = getNamedValue(base, "value");
|
||||
Base v = base.copy();
|
||||
v.setProperty("value", new DecimalType(Utilities.lowBoundaryForDecimal(value, precision == 0 ? 8 : precision)));
|
||||
v.setProperty("value", new DecimalType(Utilities.lowBoundaryForDecimal(value, precision == null ? 8 : precision)));
|
||||
result.add(v);
|
||||
} else {
|
||||
makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(), "decimal or date");
|
||||
|
@ -4333,7 +4339,7 @@ public class FHIRPathEngine {
|
|||
if (focus.size() > 1) {
|
||||
throw makeExceptionPlural(focus.size(), expr, I18nConstants.FHIRPATH_FOCUS, "highBoundary", focus.size());
|
||||
}
|
||||
int precision = 0;
|
||||
Integer precision = null;
|
||||
if (expr.getParameters().size() > 0) {
|
||||
List<Base> n1 = execute(context, focus, expr.getParameters().get(0), true);
|
||||
if (n1.size() != 1) {
|
||||
|
@ -4346,17 +4352,23 @@ public class FHIRPathEngine {
|
|||
Base base = focus.get(0);
|
||||
List<Base> result = new ArrayList<Base>();
|
||||
if (base.hasType("decimal")) {
|
||||
result.add(new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == 0 ? 8 : precision)));
|
||||
if (precision == null || (precision >= 0 && precision < 17)) {
|
||||
result.add(new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
|
||||
}
|
||||
} else if (base.hasType("integer")) {
|
||||
if (precision == null || (precision >= 0 && precision < 17)) {
|
||||
result.add(new DecimalType(Utilities.highBoundaryForDecimal(base.primitiveValue(), precision == null ? 8 : precision)));
|
||||
}
|
||||
} else if (base.hasType("date")) {
|
||||
result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == 0 ? 10 : precision)));
|
||||
result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == null ? 10 : precision)));
|
||||
} else if (base.hasType("dateTime")) {
|
||||
result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == 0 ? 17 : precision)));
|
||||
result.add(new DateTimeType(Utilities.highBoundaryForDate(base.primitiveValue(), precision == null ? 17 : precision)));
|
||||
} else if (base.hasType("time")) {
|
||||
result.add(new TimeType(Utilities.highBoundaryForTime(base.primitiveValue(), precision == 0 ? 9 : precision)));
|
||||
result.add(new TimeType(Utilities.highBoundaryForTime(base.primitiveValue(), precision == null ? 9 : precision)));
|
||||
} else if (base.hasType("Quantity")) {
|
||||
String value = getNamedValue(base, "value");
|
||||
Base v = base.copy();
|
||||
v.setProperty("value", new DecimalType(Utilities.highBoundaryForDecimal(value, precision == 0 ? 8 : precision)));
|
||||
v.setProperty("value", new DecimalType(Utilities.highBoundaryForDecimal(value, precision == null ? 8 : precision)));
|
||||
result.add(v);
|
||||
} else {
|
||||
makeException(expr, I18nConstants.FHIRPATH_WRONG_PARAM_TYPE, "sqrt", "(focus)", base.fhirType(), "decimal or date");
|
||||
|
|
|
@ -5625,6 +5625,11 @@ public boolean hasTarget() {
|
|||
|
||||
@Block()
|
||||
public static class ElementDefinitionMappingComponent extends Element implements IBaseDatatypeElement {
|
||||
@Override
|
||||
public String toString() {
|
||||
return identity+"=" + map;
|
||||
}
|
||||
|
||||
/**
|
||||
* An internal reference to the definition of a mapping.
|
||||
*/
|
||||
|
|
|
@ -397,6 +397,11 @@ public class StructureDefinition extends CanonicalResource {
|
|||
|
||||
@Block()
|
||||
public static class StructureDefinitionMappingComponent extends BackboneElement implements IBaseBackboneElement {
|
||||
@Override
|
||||
public String toString() {
|
||||
return identity + "=" + uri + " (\""+name+"\")";
|
||||
}
|
||||
|
||||
/**
|
||||
* An Internal id that is used to identify this mapping set when specific mappings are made.
|
||||
*/
|
||||
|
|
|
@ -18,6 +18,10 @@ import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponen
|
|||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceOperationComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementDocumentComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementMessagingComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementMessagingSupportedMessageComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementMessagingEndpointComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.ReferenceHandlingPolicy;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.ResourceInteractionComponent;
|
||||
import org.hl7.fhir.r5.model.CapabilityStatement.SystemInteractionComponent;
|
||||
|
@ -281,6 +285,30 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
|
||||
}
|
||||
|
||||
private class ResourceInteraction {
|
||||
private String codeString;
|
||||
private String documentation;
|
||||
public ResourceInteraction(String code, String markdown) {
|
||||
codeString = code;
|
||||
if (!Utilities.noString(markdown)) {
|
||||
documentation = markdown;
|
||||
}
|
||||
else {
|
||||
documentation = null;
|
||||
}
|
||||
}
|
||||
|
||||
public String getDocumentation() {
|
||||
return documentation;
|
||||
}
|
||||
|
||||
public String getInteraction() {
|
||||
return codeString;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
public void render(RenderingStatus status, XhtmlNode x, CapabilityStatement conf, ResourceWrapper res) throws FHIRFormatError, DefinitionException, IOException {
|
||||
status.setExtensions(true);
|
||||
boolean igRenderingMode = (context.getRules() == GenerationRules.IG_PUBLISHER);
|
||||
|
@ -346,16 +374,42 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
//Third time for individual resources
|
||||
int resCount = 1;
|
||||
for (CapabilityStatementRestResourceComponent r : rest.getResource()) {
|
||||
addResourceConfigPanel(x, r, nextLevel+1, count, resCount, igRenderingMode);
|
||||
addResourceConfigPanel(status, res, x, r, nextLevel+1, count, resCount, igRenderingMode);
|
||||
resCount++;
|
||||
}
|
||||
}
|
||||
if (rest.getOperation().size() > 0) {
|
||||
//TODO Figure out what should come out of this
|
||||
x.h(nextLevel,"operationsCap" + Integer.toString(count)).addText(context.formatPhrase(RenderingContext.CAPABILITY_OP));
|
||||
x.h(nextLevel+1,"operationsSummary" + Integer.toString(count)).addText(context.formatPhrase(RenderingContext.OP_DEF_USE));
|
||||
}
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
int messagingNum = conf.getMessaging().size();
|
||||
nextLevel = 3;
|
||||
if (messagingNum > 0) {
|
||||
x.h(2,"messaging").addText((context.formatPhrase(RenderingContext.CAPABILITY_MESSAGING_CAPS)));
|
||||
int count=1;
|
||||
for (CapabilityStatementMessagingComponent msg : conf.getMessaging())
|
||||
{
|
||||
addMessagingPanel(status, res, x, msg, nextLevel, count, messagingNum);
|
||||
count++;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
int documentNum = conf.getDocument().size();
|
||||
nextLevel = 3;
|
||||
if (documentNum > 0) {
|
||||
x.h(2,"document").addText((context.formatPhrase(RenderingContext.CAPABILITY_DOCUMENT_CAPS)));
|
||||
addDocumentTable(status, res, x, conf, nextLevel);
|
||||
}
|
||||
|
||||
|
||||
if (multExpectationsPresent) {
|
||||
addWarningPanel(x,"⹋⹋ - this mark indicates that there are more than one expectation extensions present");
|
||||
addWarningPanel(x,"⹋⹋ - " + context.formatPhrase(RenderingContext.CAPABILITY_MULT_EXT));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -484,7 +538,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
}
|
||||
}
|
||||
if (igMays.size() > 0) {
|
||||
x.h(3,"shouldIGs").addText(context.formatPhrase(RenderingContext.CAPABILITY_SHOULD_SUPP));
|
||||
x.h(3,"mayIGs").addText(context.formatPhrase(RenderingContext.CAPABILITY_MAY_SUPP));
|
||||
ul = x.ul();
|
||||
for (String url : igMays) {
|
||||
addResourceLink(ul.li(), url, url);
|
||||
|
@ -522,7 +576,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
capExpectation = getExtValueCode(c.getExtensionByUrl(EXPECTATION));
|
||||
if (!Utilities.noString(capExpectation)) {
|
||||
lItem.addTag("strong").addText(capExpectation);
|
||||
lItem.addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPP) + " ");
|
||||
lItem.addText(" " + context.formatPhrase(RenderingContext.CAPABILITY_SUPP) + " ");
|
||||
}
|
||||
lItem.code().addText(c.getCode());
|
||||
first = false;
|
||||
|
@ -564,6 +618,107 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
|
||||
}
|
||||
|
||||
private void addMessagingPanel(RenderingStatus status, ResourceWrapper res, XhtmlNode x, CapabilityStatementMessagingComponent msg, int nextLevel, int index, int total) throws FHIRFormatError, DefinitionException, IOException {
|
||||
XhtmlNode panel= null;
|
||||
XhtmlNode body = null;
|
||||
XhtmlNode row = null;
|
||||
XhtmlNode heading = null;
|
||||
|
||||
XhtmlNode table;
|
||||
XhtmlNode tbody;
|
||||
XhtmlNode tr;
|
||||
|
||||
panel = x.div().attribute("class", "panel panel-default");
|
||||
heading = panel.div().attribute("class", "panel-heading").h(nextLevel,"messaging_" + Integer.toString(index)).attribute("class", "panel-title");
|
||||
if(total == 1)
|
||||
{
|
||||
heading.addText(context.formatPhrase(RenderingContext.CAPABILITY_MESSAGING_CAP));
|
||||
}
|
||||
else
|
||||
{
|
||||
heading.addText(context.formatPhrase(RenderingContext.CAPABILITY_MESSAGING_CAP) + " " + String.valueOf(index));
|
||||
}
|
||||
|
||||
body = panel.div().attribute("class", "panel-body");
|
||||
|
||||
if(msg.hasReliableCache())
|
||||
{
|
||||
addLead(body, "Reliable Cache Length");
|
||||
body.br();
|
||||
body.addText(String.valueOf(msg.getReliableCache()) + " Minute(s)");
|
||||
body.br();
|
||||
}
|
||||
|
||||
if(msg.hasEndpoint())
|
||||
{
|
||||
body.h(nextLevel+1,"msg_end_"+Integer.toString(index)).addText(context.formatPhrase(RenderingContext.CAPABILITY_ENDPOINTS));
|
||||
table = body.table("table table-condensed table-hover");
|
||||
tr = table.addTag("thead").tr();
|
||||
tr.th().addText("Protocol");
|
||||
tr.th().addText("Address");
|
||||
|
||||
tbody = table.addTag("tbody");
|
||||
for (CapabilityStatementMessagingEndpointComponent end : msg.getEndpoint())
|
||||
{
|
||||
tr = tbody.tr();
|
||||
renderDataType(status, tr.td(), wrapNC(end.getProtocol()));
|
||||
renderUri(status, tr.td(), wrapNC(end.getAddressElement()));
|
||||
}
|
||||
body.br();
|
||||
}
|
||||
|
||||
if(msg.hasSupportedMessage())
|
||||
{
|
||||
body.h(nextLevel+1,"msg_end_"+Integer.toString(index)).addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPP_MSGS));
|
||||
table = body.table("table table-condensed table-hover");
|
||||
tr = table.addTag("thead").tr();
|
||||
tr.th().addText("Mode");
|
||||
tr.th().addText(context.formatPhrase(RenderingContext.GENERAL_DEFINITION));
|
||||
|
||||
tbody = table.addTag("tbody");
|
||||
for (CapabilityStatementMessagingSupportedMessageComponent sup : msg.getSupportedMessage())
|
||||
{
|
||||
tr = tbody.tr();
|
||||
tr.td().addText(sup.getMode().toCode());
|
||||
renderCanonical(status, res, tr.td(), StructureDefinition.class, sup.getDefinitionElement());
|
||||
}
|
||||
if(msg.hasDocumentation())
|
||||
{
|
||||
addLead(body, context.formatPhrase(RenderingContext.GENERAL_DOCUMENTATION));
|
||||
addMarkdown(body.blockquote(), msg.getDocumentation());
|
||||
}
|
||||
body.br();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void addDocumentTable(RenderingStatus status, ResourceWrapper res, XhtmlNode x, CapabilityStatement conf, int nextLevel) throws FHIRFormatError, DefinitionException, IOException {
|
||||
XhtmlNode table;
|
||||
XhtmlNode tbody;
|
||||
XhtmlNode tr;
|
||||
|
||||
table = x.table("table table-condensed table-hover");
|
||||
tr = table.addTag("thead").tr();
|
||||
tr.th().addText("Mode");
|
||||
tr.th().addText(context.formatPhrase(RenderingContext.CAPABILITY_PROF_RES_DOC));
|
||||
tr.th().addText(context.formatPhrase(RenderingContext.GENERAL_DOCUMENTATION));
|
||||
|
||||
tbody = table.addTag("tbody");
|
||||
for (CapabilityStatementDocumentComponent document : conf.getDocument()) {
|
||||
tr = tbody.tr();
|
||||
tr.td().addText(document.getMode().toCode());
|
||||
renderCanonical(status, res, tr.td(), StructureDefinition.class, document.getProfileElement());
|
||||
if(document.hasDocumentation())
|
||||
{
|
||||
addMarkdown(tr.td(), document.getDocumentation());
|
||||
}
|
||||
else
|
||||
{
|
||||
tr.td().nbsp();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String getCorsText(boolean on) {
|
||||
if (on) {
|
||||
return context.formatPhrase(RenderingContext.CAPABILITY_CORS_YES);
|
||||
|
@ -639,10 +794,10 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
for (Map<String,String> interactionMap : interactions) {
|
||||
item = uList.li();
|
||||
if (Utilities.noString(verb)) {
|
||||
item.addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPP_THE) + " ");
|
||||
item.addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPPS_THE) + " ");
|
||||
}
|
||||
else {
|
||||
item.addTag("strong").addText(verb);
|
||||
item.addTag("strong").addText(verb + " ");
|
||||
item.addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPP_THE) + " ");
|
||||
}
|
||||
interaction = interactionMap.keySet().toArray()[0].toString();
|
||||
|
@ -669,7 +824,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
}
|
||||
}
|
||||
|
||||
private void addInteractionSummaryList(XhtmlNode uList, String verb, List<String> interactions) {
|
||||
private void addInteractionSummaryList(XhtmlNode uList, String verb, List<ResourceInteraction> interactions) {
|
||||
if (interactions.size() == 0) return;
|
||||
XhtmlNode item = uList.li();
|
||||
if (Utilities.noString(verb)) {
|
||||
|
@ -677,10 +832,10 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
}
|
||||
else {
|
||||
item.addTag("strong").addText(verb);
|
||||
item.addText(context.formatPhrase(RenderingContext.CAPABILITY_SUPP) + " ");
|
||||
item.addText(" " + context.formatPhrase(RenderingContext.CAPABILITY_SUPP) + " ");
|
||||
}
|
||||
addSeparatedListOfCodes(item, interactions, ",");
|
||||
item.addText(".");
|
||||
|
||||
applyInteractionsList(item, interactions);
|
||||
}
|
||||
|
||||
private void addSummaryIntro(XhtmlNode x) {
|
||||
|
@ -779,7 +934,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
renderSupportedProfiles(status, res, profCell, r);
|
||||
}
|
||||
//Show capabilities
|
||||
tr.td().addText(showOp(r, TypeRestfulInteraction.READ));
|
||||
tr.td().attribute("class", "text-center").addText(showOp(r, TypeRestfulInteraction.READ));
|
||||
if (hasVRead)
|
||||
tr.td().attribute("class", "text-center").addText(showOp(r, TypeRestfulInteraction.VREAD));
|
||||
tr.td().attribute("class", "text-center").addText(showOp(r, TypeRestfulInteraction.SEARCHTYPE));
|
||||
|
@ -934,11 +1089,45 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
result.add("$"+op.getName());
|
||||
}
|
||||
}
|
||||
else {
|
||||
result.add("$"+op.getName());
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private void addResourceConfigPanel(XhtmlNode x, CapabilityStatementRestResourceComponent r, int nextLevel, int count, int resCount, boolean igRenderingMode) throws FHIRFormatError, DefinitionException, IOException {
|
||||
private void applyInteractionsList(XhtmlNode item, List<ResourceInteraction> list) {
|
||||
List<String> noDocList = new ArrayList<String>();
|
||||
List<ResourceInteraction> docList = new ArrayList<ResourceInteraction>();
|
||||
for (ResourceInteraction inter : list) {
|
||||
if (Utilities.noString(inter.getDocumentation())) {
|
||||
noDocList.add(inter.getInteraction());
|
||||
}
|
||||
else {
|
||||
docList.add(inter);
|
||||
}
|
||||
}
|
||||
if (noDocList.size() > 0) {
|
||||
addSeparatedListOfCodes(item,noDocList, ",");
|
||||
}
|
||||
if (docList.size() > 0) {
|
||||
item.br();
|
||||
for (ResourceInteraction inter : docList) {
|
||||
item.code().addText(inter.getInteraction());
|
||||
try {
|
||||
addMarkdown(item, inter.getDocumentation());
|
||||
}
|
||||
catch(IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
item.addText(".");
|
||||
}
|
||||
}
|
||||
|
||||
private void addResourceConfigPanel(RenderingStatus status, ResourceWrapper res, XhtmlNode x, CapabilityStatementRestResourceComponent r, int nextLevel, int count, int resCount, boolean igRenderingMode) throws FHIRFormatError, DefinitionException, IOException {
|
||||
XhtmlNode panel= null;
|
||||
XhtmlNode body = null;
|
||||
XhtmlNode panelHead = null;
|
||||
|
@ -973,7 +1162,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
cell = row.div().attribute("class", "col-lg-6");
|
||||
addLead(cell,context.formatPhrase(RenderingContext.CAPABILITY_BASE_SYS));
|
||||
cell.br();
|
||||
addResourceLink(cell, text, text);
|
||||
renderCanonical(status, res, cell, StructureDefinition.class, r.getProfileElement());
|
||||
cell=row.div().attribute("class", "col-lg-3");
|
||||
addLead(cell, context.formatPhrase(RenderingContext.CAPABILITY_PROF_CONF));
|
||||
cell.br();
|
||||
|
@ -1008,7 +1197,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
para.br();
|
||||
}
|
||||
first=false;
|
||||
addResourceLink(para, c.asStringValue(), c.asStringValue());
|
||||
renderCanonical(status, res, para, StructureDefinition.class, c);
|
||||
//para.ah(c.asStringValue()).addText(c.asStringValue());
|
||||
}
|
||||
}
|
||||
|
@ -1085,28 +1274,32 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
private void addInteractions(XhtmlNode row, CapabilityStatementRestResourceComponent r, int width) {
|
||||
String capExpectation;
|
||||
String widthString = "col-lg-" + Integer.toString(width);
|
||||
List<String> shalls = new ArrayList<String>();
|
||||
List<String> shoulds = new ArrayList<String>();
|
||||
List<String> mays = new ArrayList<String>();
|
||||
List<String> shouldnots = new ArrayList<String>();
|
||||
List<String> supporteds = new ArrayList<String>();
|
||||
//Need to build a different structure
|
||||
List<ResourceInteraction> shalls = new ArrayList<ResourceInteraction>();
|
||||
List<ResourceInteraction> shoulds = new ArrayList<ResourceInteraction>();
|
||||
List<ResourceInteraction> mays = new ArrayList<ResourceInteraction>();
|
||||
List<ResourceInteraction> shouldnots = new ArrayList<ResourceInteraction>();
|
||||
List<ResourceInteraction> supporteds = new ArrayList<ResourceInteraction>();
|
||||
|
||||
ResourceInteraction tempInteraction = null;
|
||||
|
||||
for (ResourceInteractionComponent op : r.getInteraction()) {
|
||||
capExpectation = expectationForDisplay(op,EXPECTATION);
|
||||
tempInteraction = new ResourceInteraction(op.getCode().toCode(), op.getDocumentation());
|
||||
if (!Utilities.noString(capExpectation)) {
|
||||
switch(capExpectation) {
|
||||
case "SHALL" : shalls.add(op.getCode().toCode());
|
||||
case "SHALL" : shalls.add(tempInteraction);
|
||||
break;
|
||||
case "SHOULD" : shoulds.add(op.getCode().toCode());
|
||||
case "SHOULD" : shoulds.add(tempInteraction);
|
||||
break;
|
||||
case "MAY" : mays.add(op.getCode().toCode());
|
||||
case "MAY" : mays.add(tempInteraction);
|
||||
break;
|
||||
case "SHOULD-NOT" : shouldnots.add(op.getCode().toCode());
|
||||
case "SHOULD-NOT" : shouldnots.add(tempInteraction);
|
||||
break;
|
||||
}
|
||||
}
|
||||
else {
|
||||
supporteds.add(op.getCode().toCode());
|
||||
supporteds.add(tempInteraction);
|
||||
}
|
||||
}
|
||||
XhtmlNode cell = row.div().attribute("class", widthString);
|
||||
|
@ -1494,7 +1687,7 @@ public class CapabilityStatementRenderer extends ResourceRenderer {
|
|||
|
||||
private void addWarningPanel(XhtmlNode node, String text) {
|
||||
XhtmlNode panel = node.addTag("div").attribute("class","panel panel-danger").addTag("div").attribute("class","panel-body");
|
||||
panel.addTag("span").attribute("class","label label-danger").addText("Error detected");
|
||||
panel.addTag("span").attribute("class","label label-danger").addText(context.formatPhrase(RenderingContext.CAPABILITY_ERR_DET));
|
||||
panel.addText(" " + text);
|
||||
}
|
||||
}
|
|
@ -579,8 +579,8 @@ public class ConceptMapRenderer extends TerminologyRenderer {
|
|||
if (!ccm.hasRelationship())
|
||||
tr.td();
|
||||
else {
|
||||
if (ccm.getRelationshipElement().hasExtension(ToolingExtensions.EXT_OLD_CONCEPTMAP_EQUIVALENCE)) {
|
||||
String code = ToolingExtensions.readStringExtension(ccm.getRelationshipElement(), ToolingExtensions.EXT_OLD_CONCEPTMAP_EQUIVALENCE);
|
||||
if (ccm.hasExtension(ToolingExtensions.EXT_OLD_CONCEPTMAP_EQUIVALENCE)) {
|
||||
String code = ToolingExtensions.readStringExtension(ccm, ToolingExtensions.EXT_OLD_CONCEPTMAP_EQUIVALENCE);
|
||||
tr.td().ah(context.prefixLocalHref(eqpath+"#"+code), code).tx(presentEquivalenceCode(code));
|
||||
} else {
|
||||
tr.td().ah(context.prefixLocalHref(eqpath+"#"+ccm.getRelationship().toCode()), ccm.getRelationship().toCode()).tx(presentRelationshipCode(ccm.getRelationship().toCode()));
|
||||
|
|
|
@ -86,8 +86,10 @@ public class OperationDefinitionRenderer extends TerminologyRenderer {
|
|||
} else {
|
||||
p.ah(context.prefixLocalHref(sd.getWebPath())).tx(sd.present());
|
||||
}
|
||||
}
|
||||
x.para().tx(context.formatPhrase(RenderingContext.GENERAL_PARS));
|
||||
}
|
||||
|
||||
x.h3().tx(context.formatPhrase(RenderingContext.GENERAL_PARS));
|
||||
//x.para().tx(context.formatPhrase(RenderingContext.GENERAL_PARS));
|
||||
XhtmlNode tbl = x.table( "grid");
|
||||
XhtmlNode tr = tbl.tr();
|
||||
tr.td().b().tx(context.formatPhrase(RenderingContext.OP_DEF_USE));
|
||||
|
|
|
@ -332,13 +332,14 @@ public class ProfileDrivenRenderer extends ResourceRenderer {
|
|||
}
|
||||
}
|
||||
} else if (!round2 && !exemptFromRendering(child)) {
|
||||
if (isExtension(p)) {
|
||||
boolean isExt = isExtension(p);
|
||||
if (isExt) {
|
||||
status.setExtensions(true);
|
||||
}
|
||||
List<ElementDefinition> grandChildren = getChildrenForPath(profile, allElements, path+"."+p.getName());
|
||||
filterGrandChildren(grandChildren, path+"."+p.getName(), p);
|
||||
if (p.getValues().size() > 0) {
|
||||
if (isSimple(child)) {
|
||||
if (isSimple(child) && !isExt) {
|
||||
XhtmlNode para = x.isPara() ? para = x : x.para();
|
||||
String name = p.getName();
|
||||
if (name.endsWith("[x]"))
|
||||
|
@@ -383,22 +384,40 @@ public class ProfileDrivenRenderer extends ResourceRenderer {
x.add(tbl);
}
} else if (isExtension(p)) {
StructureDefinition sd = context.getContext().fetchResource(StructureDefinition.class, p.getUrl());
for (ResourceWrapper v : p.getValues()) {
if (v != null) {
ResourceWrapper vp = v.child("value");
List<ResourceWrapper> ev = v.children("extension");
if (vp != null) {
XhtmlNode para = x.para();
para.b().addText(labelforExtension(p.getName()));
para.b().addText(labelforExtension(sd, p.getUrl()));
para.tx(": ");
renderLeaf(status, res, vp, profile, child, x, para, false, showCodeDetails, displayHints, path, indent);
} else if (!ev.isEmpty()) {
XhtmlNode bq = x.addTag("blockquote");
bq.para().b().addText(labelforExtension(p.getName()));
XhtmlNode bq = x.addTag("blockquote");
bq.para().b().addText(labelforExtension(sd, p.getUrl()));
// what happens now depends. If all the children are simple extensions, they'll be rendered as properties
boolean allSimple = true;
for (ResourceWrapper vv : ev) {
StructureDefinition ex = context.getWorker().fetchTypeDefinition("Extension");
List<ElementDefinition> children = getChildrenForPath(profile, ex.getSnapshot().getElement(), "Extension");
generateByProfile(status, res, ex, vv, allElements, child, children, bq, "Extension", showCodeDetails, indent+1);
if (!vv.has("value")) {
allSimple = false;
}
}
if (allSimple) {
XhtmlNode ul = bq.ul();
for (ResourceWrapper vv : ev) {
XhtmlNode li = ul.li();
li.tx(labelForSubExtension(vv.primitiveValue("url"), sd));
li.tx(": ");
renderLeaf(status, res, vv.child("value"), sd, child, x, li, isExt, showCodeDetails, displayHints, path, indent);
}
} else {
for (ResourceWrapper vv : ev) {
StructureDefinition ex = context.getWorker().fetchTypeDefinition("Extension");
List<ElementDefinition> children = getChildrenForPath(profile, ex.getSnapshot().getElement(), "Extension");
generateByProfile(status, res, ex, vv, allElements, child, children, bq, "Extension", showCodeDetails, indent+1);
}
}
}
}
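The hunk above chooses between two renderings for a complex extension: when every child extension carries a simple value it is shown as a bullet list of name/value pairs, otherwise each child is handed back to the full profile-driven generator. A minimal standalone sketch of that decision (the types here are simplified stand-ins, not the renderer's real ResourceWrapper/XhtmlNode API):

  import java.util.List;

  class ExtensionRenderingSketch {
    // Hypothetical, simplified model of a child extension: a url plus either a simple value or nothing.
    record ChildExt(String url, String value) {}

    // Returns a bullet-list rendering when all children are simple, otherwise a marker telling the
    // caller to fall back to full profile-driven rendering (mirroring the allSimple check above).
    static String render(List<ChildExt> children) {
      boolean allSimple = children.stream().allMatch(c -> c.value() != null);
      if (allSimple) {
        StringBuilder b = new StringBuilder();
        for (ChildExt c : children) {
          b.append("* ").append(c.url()).append(": ").append(c.value()).append("\n");
        }
        return b.toString();
      }
      return "<render each child with generateByProfile>";
    }

    public static void main(String[] args) {
      System.out.print(render(List.of(new ChildExt("numerator", "3"), new ChildExt("denominator", "4"))));
      System.out.println(render(List.of(new ChildExt("complex", null))));
    }
  }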
@@ -417,8 +436,11 @@ public class ProfileDrivenRenderer extends ResourceRenderer {
}

private String labelforExtension(String url) {
StructureDefinition sd = context.getContext().fetchResource(StructureDefinition.class, url);
private String labelForSubExtension(String url, StructureDefinition sd) {
return url;
}

private String labelforExtension(StructureDefinition sd, String url) {
if (sd == null) {
return tail(url);
} else {
@@ -467,7 +489,7 @@ public class ProfileDrivenRenderer extends ResourceRenderer {
}

public boolean isExtension(NamedResourceWrapperList p) {
return p.getName().contains("extension[");
return p.getUrl() != null;
}

@@ -571,12 +593,12 @@ public class ProfileDrivenRenderer extends ResourceRenderer {
// 2. Park it
NamedResourceWrapperList nl = null;
for (NamedResourceWrapperList t : results) {
if (t.getName().equals(url)) {
if (t.getUrl() != null && t.getUrl().equals(url)) {
nl = t;
}
}
if (nl == null) {
nl = new NamedResourceWrapperList(url);
nl = new NamedResourceWrapperList(p.getName(), url);
results.add(nl);
}
nl.getValues().add(v);
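The loop above now parks extension values by their definition URL rather than by the synthesized element name, so repeats of the same extension all land in one NamedResourceWrapperList. A simplified sketch of that bucketing, with plain strings standing in for ResourceWrapper values and an illustrative extension URL:

  import java.util.ArrayList;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;

  class ExtensionBucketingSketch {
    // Groups (url, value) pairs into one bucket per extension url, preserving first-seen order.
    static Map<String, List<String>> park(List<String[]> extensions) {
      Map<String, List<String>> buckets = new LinkedHashMap<>();
      for (String[] e : extensions) {
        buckets.computeIfAbsent(e[0], k -> new ArrayList<>()).add(e[1]);
      }
      return buckets;
    }

    public static void main(String[] args) {
      System.out.println(park(List.of(
          new String[]{"http://example.org/fhir/StructureDefinition/note", "a"},
          new String[]{"http://example.org/fhir/StructureDefinition/note", "b"})));
      // {http://example.org/fhir/StructureDefinition/note=[a, b]}
    }
  }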
@@ -152,6 +152,12 @@ public class QuestionnaireRenderer extends TerminologyRenderer {
text = v.primitiveValue("code");
}
}
if (value == null) {
value = "??";
}
if (text == null) {
text = "??";
}
boolean selected = "true".equals(opt.primitiveValue("initialSelected"));
x.option(value, text, selected);
}
@@ -578,14 +578,16 @@ public abstract class ResourceRenderer extends DataRenderer {
protected void renderUri(RenderingStatus status, XhtmlNode x, ResourceWrapper uri) throws FHIRFormatError, DefinitionException, IOException {
if (!renderPrimitiveWithNoValue(status, x, uri)) {
String v = uri.primitiveValue();
boolean local = false;

if (context.getContextUtilities().isResource(v)) {
v = "http://hl7.org/fhir/StructureDefinition/"+v;
local = true;
}
if (v.startsWith("mailto:")) {
x.ah(v).addText(v.substring(7));
} else {
ResourceWithReference rr = resolveReference(uri);
ResourceWithReference rr = local ? resolveReference(uri.resource(), v, true) : resolveReference(uri);
if (rr != null) {
if (rr.getResource() == null) {
x.ah(context.prefixLocalHref(rr.getWebPath())).addText(rr.getUrlReference());
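The renderUri change above treats a bare resource type name as a local reference by expanding it to a canonical StructureDefinition URL before resolving it. A small sketch of that expansion, assuming a plain list of known resource names (the real check is delegated to the renderer's ContextUtilities.isResource):

  import java.util.Set;

  class LocalUriSketch {
    private static final Set<String> RESOURCE_NAMES = Set.of("Patient", "Observation", "Condition"); // illustrative subset

    static String expandIfResourceName(String v) {
      if (RESOURCE_NAMES.contains(v)) {
        return "http://hl7.org/fhir/StructureDefinition/" + v; // same prefix the renderer uses
      }
      return v;
    }

    public static void main(String[] args) {
      System.out.println(expandIfResourceName("Patient"));    // http://hl7.org/fhir/StructureDefinition/Patient
      System.out.println(expandIfResourceName("mailto:x@y")); // unchanged
    }
  }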
@@ -31,16 +31,28 @@ public abstract class ResourceWrapper {

public static class NamedResourceWrapperList {
private String name;
private String url; // for extension definitions
private List<ResourceWrapper> values = new ArrayList<ResourceWrapper>();

public NamedResourceWrapperList(String name) {
super();
this.name = name;
}

public NamedResourceWrapperList(String name, String url) {
super();
this.name = name;
this.url = url;
}

public String getName() {
return name;
}

public String getUrl() {
return url;
}

public List<ResourceWrapper> getValues() {
return values;
}
@@ -275,6 +275,7 @@ public class ToolingExtensions {
public static final String EXT_VS_CS_SUPPL_NEEDED = "http://hl7.org/fhir/StructureDefinition/valueset-supplement";
public static final String EXT_TYPE_PARAMETER = "http://hl7.org/fhir/tools/StructureDefinition/type-parameter";
public static final String EXT_ALTERNATE_CANONICAL = "http://hl7.org/fhir/StructureDefinition/alternate-canonical";
public static final String EXT_SUPPRESSED = "http://hl7.org/fhir/StructureDefinition/elementdefinition-suppress";

// specific extension helpers
@@ -31,6 +31,7 @@ import org.hl7.fhir.r5.formats.JsonParser;
import org.hl7.fhir.r5.formats.XmlParser;
import org.hl7.fhir.r5.model.Base;
import org.hl7.fhir.r5.model.ElementDefinition.ElementDefinitionBindingComponent;
import org.hl7.fhir.r5.model.Narrative;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.StructureDefinition;
import org.hl7.fhir.r5.model.StructureDefinition.StructureDefinitionKind;
@@ -582,6 +583,9 @@ public class SnapShotGenerationTests {
rc.setProfileUtilities(new ProfileUtilities(TestingUtilities.getSharedWorkerContext(), null, new TestPKP()));
RendererFactory.factory(output, rc).renderResource(ResourceWrapper.forResource(rc.getContextUtilities(), output));
}
// we just generated it - but we don't care what it is here, just that there's no exceptions (though we need it for the rules)
Narrative txt = output.getText();
output.setText(null);
if (!fail) {
test.output = output;
TestingUtilities.getSharedWorkerContext().cacheResource(output);
@@ -603,6 +607,7 @@ public class SnapShotGenerationTests {
}
Assertions.assertTrue(structureDefinitionEquality, "Output does not match expected");
}
output.setText(txt);
}

private StructureDefinition getSD(String url, SnapShotGenerationTestsContext context) throws DefinitionException, FHIRException, IOException {
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.3.21-SNAPSHOT</version>
<version>6.3.23-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.3.21-SNAPSHOT</version>
<version>6.3.23-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -94,7 +94,7 @@ public class MyURIResolver implements URIResolver {
if (s != null)
return s;
}
return TransformerFactory.newInstance().getURIResolver().resolve(href, base);
return org.hl7.fhir.utilities.xml.XMLUtil.newXXEProtectedTransformerFactory().getURIResolver().resolve(href, base);
} else
return new StreamSource(ManagedFileAccess.inStream(href.contains(File.separator) ? href : Utilities.path(path, href)));
} catch (FileNotFoundException e) {
@@ -1032,6 +1032,10 @@ public class Utilities {

public static String escapeJson(String value) {
return escapeJson(value, true);
}

public static String escapeJson(String value, boolean escapeUnicodeWhitespace) {
if (value == null)
return "";

@@ -1049,7 +1053,7 @@ public class Utilities {
b.append("\\\\");
else if (c == ' ')
b.append(" ");
else if (isWhitespace(c)) {
else if ((c == '\r' || c == '\n') || (isWhitespace(c) && escapeUnicodeWhitespace)) {
b.append("\\u"+Utilities.padLeft(Integer.toHexString(c), '0', 4));
} else if (((int) c) < 32)
b.append("\\u" + Utilities.padLeft(Integer.toHexString(c), '0', 4));
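The new escapeUnicodeWhitespace parameter above controls whether whitespace other than a plain space is written as a \uXXXX escape; \r and \n are always escaped. A self-contained sketch of that behaviour (not the exact Utilities implementation, which handles quotes, backslashes and the remaining control characters in the surrounding code):

  class JsonEscapeSketch {
    static String escape(String value, boolean escapeUnicodeWhitespace) {
      StringBuilder b = new StringBuilder();
      for (char c : value.toCharArray()) {
        if (c == '"') {
          b.append("\\\"");
        } else if (c == '\\') {
          b.append("\\\\");
        } else if (c == ' ') {
          b.append(' ');
        } else if (c == '\r' || c == '\n' || (Character.isWhitespace(c) && escapeUnicodeWhitespace)) {
          b.append(String.format("\\u%04x", (int) c)); // e.g. tab -> \u0009
        } else if (c < 32) {
          b.append(String.format("\\u%04x", (int) c)); // other control characters are always escaped
        } else {
          b.append(c);
        }
      }
      return b.toString();
    }

    public static void main(String[] args) {
      System.out.println(escape("a\tb\nc", true));  // a\u0009b\u000ac
      System.out.println(escape("a\tb\nc", false)); // tab kept literally, newline still escaped
    }
  }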
@@ -1691,31 +1695,47 @@ public class Utilities {
value = value.substring(0, value.indexOf("e"));
}
if (isZero(value)) {
return applyPrecision("-0.5000000000000000000000000", precision);
return applyPrecision("-0.5000000000000000000000000", precision, true);
} else if (value.startsWith("-")) {
return "-"+highBoundaryForDecimal(value.substring(1), precision)+(e == null ? "" : e);
} else {
if (value.contains(".")) {
return applyPrecision(minusOne(value)+"50000000000000000000000000000", precision)+(e == null ? "" : e);
return applyPrecision(minusOne(value)+"50000000000000000000000000000", precision, true)+(e == null ? "" : e);
} else {
return applyPrecision(minusOne(value)+".50000000000000000000000000000", precision)+(e == null ? "" : e);
return applyPrecision(minusOne(value)+".50000000000000000000000000000", precision, true)+(e == null ? "" : e);
}
}
}

private static String applyPrecision(String v, int p) {
private static String applyPrecision(String v, int p, boolean down) {
String nv = v;
int dp = -1;
if (nv.contains(".")) {
dp = nv.indexOf(".");
nv = nv.substring(0, dp)+nv.substring(dp+1);
}
String s = null;
int d = p - getDecimalPrecision(v);
if (d == 0) {
return v;
s = nv;
} else if (d > 0) {
return v + padLeft("", '0', d);
s = nv + padLeft("", '0', d);
} else {
if (v.charAt(v.length()+d) >= '6') {
return v.substring(0, v.length()+d-1)+((char) (v.charAt(v.length()+d)+1));
int l = v.length();
int ld = l+d;
if (dp > -1) {
ld--;
}
if (nv.charAt(ld) >= '5' && !down) {
s = nv.substring(0, ld-1)+((char) (nv.charAt(ld-1)+1));
} else {
return v.substring(0, v.length()+d);
s = nv.substring(0, ld);
}
}
if (s.endsWith(".")) {
s = s.substring(0, s.length()-1);
}
return dp == -1 || dp >= s.length() ? s : s.substring(0, dp)+"."+s.substring(dp);
}

private static String minusOne(String value) {
@@ -1827,14 +1847,14 @@ public class Utilities {
value = value.substring(0, value.indexOf("e"));
}
if (isZero(value)) {
return applyPrecision("0.50000000000000000000000000000", precision);
return applyPrecision("0.50000000000000000000000000000", precision, false);
} else if (value.startsWith("-")) {
return "-"+lowBoundaryForDecimal(value.substring(1), precision)+(e == null ? "" : e);
} else {
if (value.contains(".")) {
return applyPrecision(value+"50000000000000000000000000000", precision)+(e == null ? "" : e);
return applyPrecision(value+"50000000000000000000000000000", precision, false)+(e == null ? "" : e);
} else {
return applyPrecision(value+".50000000000000000000000000000", precision)+(e == null ? "" : e);
return applyPrecision(value+".50000000000000000000000000000", precision, false)+(e == null ? "" : e);
}
}
}
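The boundary code above computes the range of values a decimal literal could represent given its precision: the low boundary of "1.5" is 1.45 and the high boundary is 1.55, and applyPrecision then pads or trims to the requested number of decimal digits, rounding toward the boundary (which is what the new down flag selects). A hedged BigDecimal sketch of the same idea, not the string-based implementation used here:

  import java.math.BigDecimal;
  import java.math.RoundingMode;

  class DecimalBoundarySketch {
    // Half of one unit in the last place of the literal, e.g. "1.5" -> 0.05
    static BigDecimal halfUlp(String literal) {
      int scale = new BigDecimal(literal).scale();
      return BigDecimal.valueOf(5).movePointLeft(scale + 1);
    }

    static BigDecimal lowBoundary(String literal, int decimalPlaces) {
      return new BigDecimal(literal).subtract(halfUlp(literal)).setScale(decimalPlaces, RoundingMode.FLOOR);
    }

    static BigDecimal highBoundary(String literal, int decimalPlaces) {
      return new BigDecimal(literal).add(halfUlp(literal)).setScale(decimalPlaces, RoundingMode.CEILING);
    }

    public static void main(String[] args) {
      System.out.println(lowBoundary("1.5", 8));  // 1.45000000
      System.out.println(highBoundary("1.5", 8)); // 1.55000000
    }
  }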
@@ -73,7 +73,7 @@ public class XsltUtilities {
}

public static byte[] transform(Map<String, byte[]> files, byte[] source, byte[] xslt) throws TransformerException {
TransformerFactory f = TransformerFactory.newInstance();
TransformerFactory f = org.hl7.fhir.utilities.xml.XMLUtil.newXXEProtectedTransformerFactory();
f.setAttribute("http://saxon.sf.net/feature/version-warning", Boolean.FALSE);
StreamSource xsrc = new StreamSource(new ByteArrayInputStream(xslt));
f.setURIResolver(new ZipURIResolver(files));
@@ -129,7 +129,7 @@ public class XsltUtilities {

public static void transform(String xsltDir, String source, String xslt, String dest, URIResolver alt) throws TransformerException, IOException {

TransformerFactory f = TransformerFactory.newInstance();
TransformerFactory f = org.hl7.fhir.utilities.xml.XMLUtil.newXXEProtectedTransformerFactory();
StreamSource xsrc = new StreamSource(ManagedFileAccess.inStream(xslt));
f.setURIResolver(new MyURIResolver(xsltDir, alt));
Transformer t = f.newTransformer(xsrc);
@@ -91,16 +91,18 @@ public class SimpleHTTPClient {
c.setInstanceFollowRedirects(false);

switch (c.getResponseCode()) {
case HttpURLConnection.HTTP_MOVED_PERM:
case HttpURLConnection.HTTP_MOVED_TEMP:
String location = c.getHeaderField("Location");
location = URLDecoder.decode(location, "UTF-8");
URL base = new URL(url);
URL next = new URL(base, location); // Deal with relative URLs
url = next.toExternalForm();
continue;
default:
done = true;
case HttpURLConnection.HTTP_MOVED_PERM:
case HttpURLConnection.HTTP_MOVED_TEMP:
case 307:
case 308: // Same as HTTP_MOVED_PERM, but does not allow changing the request method from POST to GET
String location = c.getHeaderField("Location");
location = URLDecoder.decode(location, "UTF-8");
URL base = new URL(url);
URL next = new URL(base, location); // Deal with relative URLs
url = next.toExternalForm();
continue;
default:
done = true;
}
}
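The switch above extends redirect handling from 301/302 to 307 and 308; the latter two differ only in that the request method must not change when following the redirect. A minimal sketch of the same follow-redirects loop (redirect limits and error handling omitted):

  import java.net.HttpURLConnection;
  import java.net.URL;
  import java.net.URLDecoder;
  import java.nio.charset.StandardCharsets;

  class RedirectSketch {
    static HttpURLConnection openFollowingRedirects(String url) throws Exception {
      while (true) {
        HttpURLConnection c = (HttpURLConnection) new URL(url).openConnection();
        c.setInstanceFollowRedirects(false);
        switch (c.getResponseCode()) {
          case HttpURLConnection.HTTP_MOVED_PERM: // 301
          case HttpURLConnection.HTTP_MOVED_TEMP: // 302
          case 307:
          case 308: // like 301, but the request method must be preserved
            String location = URLDecoder.decode(c.getHeaderField("Location"), StandardCharsets.UTF_8);
            url = new URL(new URL(url), location).toExternalForm(); // resolve relative Location headers
            continue;
          default:
            return c;
        }
      }
    }
  }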
@@ -1107,4 +1107,5 @@ public class I18nConstants {
public static final String TYPE_SPECIFIC_CHECKS_DT_XHTML_LITERAL_HREF = "TYPE_SPECIFIC_CHECKS_DT_XHTML_LITERAL_HREF";
public static final String SM_TARGET_TYPE_UNKNOWN = "SM_TARGET_TYPE_UNKNOWN";
public static final String XHTML_XHTML_ATTRIBUTE_XML_SPACE = "XHTML_XHTML_ATTRIBUTE_XML_SPACE";
public static final String VALIDATION_HL7_PUBLISHER_MULTIPLE_WGS = "VALIDATION_HL7_PUBLISHER_MULTIPLE_WGS";
}
@@ -78,6 +78,7 @@ public class RenderingI18nContext extends I18nBase {
public static final String CAPABILITY_CREATE_INT = "CAPABILITY_CREATE_INT";
public static final String GENERAL_CRIT = "GENERAL_CRIT";
public static final String CAPABILITY_DELETE_INT = "CAPABILITY_DELETE_INT";
public static final String CAPABILITY_ERR_DET = "CAPABILITY_ERR_DET";
public static final String CAPABILITY_EXT_OP = "CAPABILITY_EXT_OP";
public static final String CAPABILITY_FHIR = "CAPABILITY_FHIR";
public static final String CAPABILITY_FHIR_VER = "CAPABILITY_FHIR_VER";
@@ -88,6 +89,8 @@ public class RenderingI18nContext extends I18nBase {
public static final String CAPABILITY_INTER_SUPP = "CAPABILITY_INTER_SUPP";
public static final String CAPABILITY_INT_DESC = "CAPABILITY_INT_DESC";
public static final String CAPABILITY_INT_SUMM = "CAPABILITY_INT_SUMM";
public static final String CAPABILITY_MAY_SUPP = "CAPABILITY_MAY_SUPP";
public static final String CAPABILITY_MULT_EXT = "CAPABILITY_MULT_EXT";
public static final String CAPABILITY_NOTE_CAP = "CAPABILITY_NOTE_CAP";
public static final String CAPABILITY_OP = "CAPABILITY_OP";
public static final String CAPABILITY_OPER = "CAPABILITY_OPER";
@@ -99,6 +102,7 @@ public class RenderingI18nContext extends I18nBase {
public static final String CAPABILITY_PATCH_INT = "CAPABILITY_PATCH_INT";
public static final String GENERAL_PROF = "GENERAL_PROF";
public static final String CAPABILITY_PROF_CONF = "CAPABILITY_PROF_CONF";
public static final String CAPABILITY_PROF_RES_DOC = "CAPABILITY_PROF_RES_DOC";
public static final String CAPABILITY_PROF_MAP = "CAPABILITY_PROF_MAP";
public static final String CAPABILITY_PUB_BY = "CAPABILITY_PUB_BY";
public static final String CAPABILITY_PUB_ON = "CAPABILITY_PUB_ON";
@@ -106,6 +110,9 @@ public class RenderingI18nContext extends I18nBase {
public static final String CAPABILITY_REF_PROF = "CAPABILITY_REF_PROF";
public static final String CAPABILITY_REQ_RECOM = "CAPABILITY_REQ_RECOM";
public static final String CAPABILITY_REST_CAPS = "CAPABILITY_REST_CAPS";
public static final String CAPABILITY_DOCUMENT_CAPS = "CAPABILITY_DOCUMENT_CAPS";
public static final String CAPABILITY_MESSAGING_CAPS = "CAPABILITY_MESSAGING_CAPS";
public static final String CAPABILITY_MESSAGING_CAP = "CAPABILITY_MESSAGING_CAP";
public static final String CAPABILITY_REST_CONFIG = "CAPABILITY_REST_CONFIG";
public static final String CAPABILITY_RES_CONF = "CAPABILITY_RES_CONF";
public static final String CAPABILITY_RES_ENB = "CAPABILITY_RES_ENB";
@@ -126,7 +133,10 @@ public class RenderingI18nContext extends I18nBase {
public static final String CAPABILITY_SUPP_FORM = "CAPABILITY_SUPP_FORM";
public static final String CAPABILITY_SUPP_PATCH_FORM = "CAPABILITY_SUPP_PATCH_FORM";
public static final String CAPABILITY_SUPP_PROFS = "CAPABILITY_SUPP_PROFS";
public static final String CAPABILITY_SUPP_MSGS = "CAPABILITY_SUPP_MSGS";
public static final String CAPABILITY_ENDPOINTS = "CAPABILITY_ENDPOINTS";
public static final String CAPABILITY_SUPP_THE = "CAPABILITY_SUPP_THE";
public static final String CAPABILITY_SUPPS_THE = "CAPABILITY_SUPPS_THE";
public static final String GENERAL_TYPE = "GENERAL_TYPE";
public static final String CAPABILITY_TYPS = "CAPABILITY_TYPS";
public static final String CAPABILITY_TYP_PRES = "CAPABILITY_TYP_PRES";
@@ -682,7 +682,7 @@ public class JsonParser {
break;
case STRING:
b.append("\"");
b.append(Utilities.escapeJson(((JsonString) e).getValue()));
b.append(Utilities.escapeJson(((JsonString) e).getValue(), false));
b.append("\"");
break;
default:
@@ -18,7 +18,7 @@ import org.slf4j.LoggerFactory;
public abstract class BasePackageCacheManager implements IPackageCacheManager {

private static final Logger ourLog = LoggerFactory.getLogger(BasePackageCacheManager.class);
protected List<PackageServer> myPackageServers = new ArrayList<>();
protected final List<PackageServer> myPackageServers;
private Function<PackageServer, PackageClient> myClientFactory = server -> new PackageClient(server);
protected boolean silent;

@@ -27,8 +27,12 @@ public abstract class BasePackageCacheManager implements IPackageCacheManager {
*/
public BasePackageCacheManager() {
super();
myPackageServers = new ArrayList<>();
}

protected BasePackageCacheManager(@Nonnull List<PackageServer> thePackageServers) {
myPackageServers = thePackageServers;
}
/**
* Provide a new client factory implementation
*/
@ -1,51 +0,0 @@
|
|||
package org.hl7.fhir.utilities.npm;
|
||||
|
||||
import org.hl7.fhir.utilities.TextFile;
|
||||
import org.hl7.fhir.utilities.Utilities;
|
||||
import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.nio.channels.FileChannel;
|
||||
import java.nio.channels.FileLock;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.locks.ReadWriteLock;
|
||||
import java.util.concurrent.locks.ReentrantReadWriteLock;
|
||||
|
||||
public class FilesystemPackageCacheLock {
|
||||
|
||||
private static final ConcurrentHashMap<File, ReadWriteLock> locks = new ConcurrentHashMap<>();
|
||||
|
||||
private final File lockFile;
|
||||
|
||||
public FilesystemPackageCacheLock(File cacheFolder, String name) throws IOException {
|
||||
this.lockFile = ManagedFileAccess.file(Utilities.path(cacheFolder.getAbsolutePath(), name + ".lock"));
|
||||
if (!lockFile.isFile()) {
|
||||
TextFile.stringToFile("", lockFile);
|
||||
}
|
||||
}
|
||||
|
||||
public <T> T doWriteWithLock(FilesystemPackageCacheManager.CacheLockFunction<T> f) throws IOException {
|
||||
|
||||
try (FileChannel channel = new RandomAccessFile(lockFile, "rw").getChannel()) {
|
||||
locks.putIfAbsent(lockFile, new ReentrantReadWriteLock());
|
||||
ReadWriteLock lock = locks.get(lockFile);
|
||||
lock.writeLock().lock();
|
||||
final FileLock fileLock = channel.lock();
|
||||
T result = null;
|
||||
try {
|
||||
result = f.get();
|
||||
} finally {
|
||||
fileLock.release();
|
||||
lock.writeLock().unlock();
|
||||
}
|
||||
if (!lockFile.delete()) {
|
||||
lockFile.deleteOnExit();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,28 +1,16 @@
|
|||
package org.hl7.fhir.utilities.npm;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.*;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.*;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.With;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
|
@ -87,28 +75,33 @@ import org.slf4j.LoggerFactory;
|
|||
*/
|
||||
public class FilesystemPackageCacheManager extends BasePackageCacheManager implements IPackageCacheManager {
|
||||
|
||||
public static final String INI_TIMESTAMP_FORMAT = "yyyyMMddHHmmss";
|
||||
private final FilesystemPackageCacheManagerLocks locks;
|
||||
|
||||
// When running in testing mode, some packages are provided from the test case repository rather than by the normal means
|
||||
// the PackageProvider is responsible for this. if no package provider is defined, or it declines to handle the package,
|
||||
// then the normal means will be used
|
||||
public interface IPackageProvider {
|
||||
boolean handlesPackage(String id, String version);
|
||||
|
||||
InputStreamWithSrc provide(String id, String version) throws IOException;
|
||||
}
|
||||
|
||||
private static IPackageProvider packageProvider;
|
||||
public static final String PACKAGE_REGEX = "^[a-zA-Z][A-Za-z0-9\\_\\-]*(\\.[A-Za-z0-9\\_\\-]+)+$";
|
||||
public static final String PACKAGE_VERSION_REGEX = "^[A-Za-z][A-Za-z0-9\\_\\-]*(\\.[A-Za-z0-9\\_\\-]+)+\\#[A-Za-z0-9\\-\\_\\$]+(\\.[A-Za-z0-9\\-\\_\\$]+)*$";
|
||||
public static final String PACKAGE_VERSION_REGEX_OPT = "^[A-Za-z][A-Za-z0-9\\_\\-]*(\\.[A-Za-z0-9\\_\\-]+)+(\\#[A-Za-z0-9\\-\\_]+(\\.[A-Za-z0-9\\-\\_]+)*)?$";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(FilesystemPackageCacheManager.class);
|
||||
private static final String CACHE_VERSION = "3"; // second version - see wiki page
|
||||
private File cacheFolder;
|
||||
private boolean progress = true;
|
||||
private List<NpmPackage> temporaryPackages = new ArrayList<>();
|
||||
@Nonnull
|
||||
private final File cacheFolder;
|
||||
|
||||
private final List<NpmPackage> temporaryPackages = new ArrayList<>();
|
||||
private boolean buildLoaded = false;
|
||||
private Map<String, String> ciList = new HashMap<String, String>();
|
||||
private final Map<String, String> ciList = new HashMap<>();
|
||||
private JsonArray buildInfo;
|
||||
private boolean suppressErrors;
|
||||
@Setter
|
||||
@Getter
|
||||
private boolean minimalMemory;
|
||||
|
||||
public static class Builder {
|
||||
|
@ -116,7 +109,8 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
@Getter
|
||||
private final File cacheFolder;
|
||||
|
||||
@With @Getter
|
||||
@With
|
||||
@Getter
|
||||
private final List<PackageServer> packageServers;
|
||||
|
||||
public Builder() throws IOException {
|
||||
|
@ -127,6 +121,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
private File getUserCacheFolder() throws IOException {
|
||||
return ManagedFileAccess.file(Utilities.path(System.getProperty("user.home"), ".fhir", "packages"));
|
||||
}
|
||||
|
||||
private List<PackageServer> getPackageServersFromFHIRSettings() {
|
||||
List<PackageServer> packageServers = new ArrayList<>(getConfiguredServers());
|
||||
if (!isIgnoreDefaultPackageServers()) {
|
||||
|
@ -147,15 +142,16 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
protected List<PackageServer> getConfiguredServers() {
|
||||
return PackageServer.getConfiguredServers();
|
||||
}
|
||||
|
||||
private Builder(File cacheFolder, List<PackageServer> packageServers) {
|
||||
this.cacheFolder = cacheFolder;
|
||||
this.packageServers = packageServers;
|
||||
}
|
||||
|
||||
public Builder withCacheFolder (String cacheFolderPath) throws IOException {
|
||||
public Builder withCacheFolder(String cacheFolderPath) throws IOException {
|
||||
File cacheFolder = ManagedFileAccess.file(cacheFolderPath);
|
||||
if (!cacheFolder.exists()) {
|
||||
throw new FHIRException("The folder '"+cacheFolder+"' could not be found");
|
||||
throw new FHIRException("The folder '" + cacheFolder + "' could not be found");
|
||||
}
|
||||
return new Builder(cacheFolder, this.packageServers);
|
||||
}
|
||||
|
@ -163,7 +159,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
public Builder withSystemCacheFolder() throws IOException {
|
||||
final File systemCacheFolder;
|
||||
if (Utilities.isWindows()) {
|
||||
systemCacheFolder = ManagedFileAccess.file(Utilities.path(System.getenv("ProgramData"), ".fhir", "packages"));
|
||||
systemCacheFolder = ManagedFileAccess.file(Utilities.path(System.getenv("ProgramData"), ".fhir", "packages"));
|
||||
} else {
|
||||
systemCacheFolder = ManagedFileAccess.file(Utilities.path("/var", "lib", ".fhir", "packages"));
|
||||
}
|
||||
|
@ -179,22 +175,65 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
}
|
||||
}
|
||||
|
||||
private FilesystemPackageCacheManager(File cacheFolder, List<PackageServer> packageServers) throws IOException {
|
||||
private FilesystemPackageCacheManager(@Nonnull File cacheFolder, @Nonnull List<PackageServer> packageServers) throws IOException {
|
||||
super(packageServers);
|
||||
this.cacheFolder = cacheFolder;
|
||||
this.myPackageServers = packageServers;
|
||||
initCacheFolder();
|
||||
|
||||
try {
|
||||
this.locks = FilesystemPackageCacheManagerLocks.getFilesystemPackageCacheManagerLocks(cacheFolder);
|
||||
} catch (RuntimeException e) {
|
||||
if (e.getCause() instanceof IOException) {
|
||||
throw (IOException) e.getCause();
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
prepareCacheFolder();
|
||||
}
|
||||
|
||||
protected void initCacheFolder() throws IOException {
|
||||
if (!(cacheFolder.exists()))
|
||||
Utilities.createDirectory(cacheFolder.getAbsolutePath());
|
||||
String packagesIniPath = Utilities.path(cacheFolder, "packages.ini");
|
||||
File packagesIniFile = ManagedFileAccess.file(packagesIniPath);
|
||||
if (!(packagesIniFile.exists()))
|
||||
packagesIniFile.createNewFile();
|
||||
TextFile.stringToFile("[cache]\r\nversion=" + CACHE_VERSION + "\r\n\r\n[urls]\r\n\r\n[local]\r\n\r\n", packagesIniPath);
|
||||
createIniFile();
|
||||
for (File f : cacheFolder.listFiles()) {
|
||||
/**
|
||||
* Check if the cache folder exists and is valid.
|
||||
* <p>
|
||||
* If it doesn't exist, create it.
|
||||
* <p>
|
||||
* If it does exist and isn't valid, delete it and create a new one.
|
||||
* <p>
|
||||
* If it does exist and is valid, just do some cleanup (delete temp download directories, etc.)
|
||||
*
|
||||
* @throws IOException if the cache folder can't be created
|
||||
*/
|
||||
protected void prepareCacheFolder() throws IOException {
|
||||
locks.getCacheLock().doWriteWithLock(() -> {
|
||||
|
||||
if (!(cacheFolder.exists())) {
|
||||
Utilities.createDirectory(cacheFolder.getAbsolutePath());
|
||||
createIniFile();
|
||||
} else {
|
||||
if (!isCacheFolderValid()) {
|
||||
clearCache();
|
||||
createIniFile();
|
||||
} else {
|
||||
deleteOldTempDirectories();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
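prepareCacheFolder above now runs its folder checks and repairs under the cache-wide write lock obtained from FilesystemPackageCacheManagerLocks, replacing the per-call FilesystemPackageCacheLock that is deleted further down in this change. A self-contained sketch of a doWriteWithLock-style helper combining an in-process ReentrantReadWriteLock with an OS-level FileLock, in the spirit of the removed class; the real locks class additionally provides per-package locks and read locks, and propagates IOException from the callback:

  import java.io.File;
  import java.io.IOException;
  import java.io.RandomAccessFile;
  import java.nio.channels.FileChannel;
  import java.nio.channels.FileLock;
  import java.util.concurrent.locks.ReentrantReadWriteLock;
  import java.util.function.Supplier;

  class CacheWriteLockSketch {
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private final File lockFile;

    CacheWriteLockSketch(File lockFile) {
      this.lockFile = lockFile;
    }

    <T> T doWriteWithLock(Supplier<T> work) throws IOException {
      lock.writeLock().lock(); // exclude other threads in this JVM
      try (FileChannel channel = new RandomAccessFile(lockFile, "rw").getChannel();
           FileLock fileLock = channel.lock()) { // exclude other processes
        return work.get();
      } finally {
        lock.writeLock().unlock();
      }
    }
  }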
|
||||
private boolean isCacheFolderValid() throws IOException {
|
||||
String iniPath = getPackagesIniPath();
|
||||
File iniFile = ManagedFileAccess.file(iniPath);
|
||||
if (!(iniFile.exists())) {
|
||||
return false;
|
||||
}
|
||||
IniFile ini = new IniFile(iniPath);
|
||||
String v = ini.getStringProperty("cache", "version");
|
||||
return CACHE_VERSION.equals(v);
|
||||
}
|
||||
|
||||
private void deleteOldTempDirectories() throws IOException {
|
||||
for (File f : Objects.requireNonNull(cacheFolder.listFiles())) {
|
||||
if (f.isDirectory() && Utilities.isValidUUID(f.getName())) {
|
||||
Utilities.clearDirectory(f.getAbsolutePath());
|
||||
f.delete();
|
||||
|
@ -202,6 +241,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
private void initPackageServers() {
|
||||
myPackageServers.addAll(getConfiguredServers());
|
||||
if (!isIgnoreDefaultPackageServers()) {
|
||||
|
@ -222,84 +262,43 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
return PackageServer.getConfiguredServers();
|
||||
}
|
||||
|
||||
public boolean isMinimalMemory() {
|
||||
return minimalMemory;
|
||||
}
|
||||
|
||||
public void setMinimalMemory(boolean minimalMemory) {
|
||||
this.minimalMemory = minimalMemory;
|
||||
}
|
||||
|
||||
/**
|
||||
* do not use this in minimal memory mode
|
||||
* @param packagesFolder
|
||||
* @throws IOException
|
||||
*/
|
||||
public void loadFromFolder(String packagesFolder) throws IOException {
|
||||
assert !minimalMemory;
|
||||
|
||||
File[] files = ManagedFileAccess.file(packagesFolder).listFiles();
|
||||
if (files != null) {
|
||||
for (File f : files) {
|
||||
if (f.getName().endsWith(".tgz")) {
|
||||
FileInputStream fs = ManagedFileAccess.inStream(f);
|
||||
try {
|
||||
temporaryPackages.add(NpmPackage.fromPackage(fs));
|
||||
} finally {
|
||||
fs.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String getFolder() {
|
||||
return cacheFolder.getAbsolutePath();
|
||||
}
|
||||
|
||||
private NpmPackage loadPackageInfo(String path) throws IOException {
|
||||
File f = ManagedFileAccess.file(Utilities.path(path, "usage.ini"));
|
||||
JsonObject j = f.exists() ? JsonParser.parseObject(f) : new JsonObject();
|
||||
j.set("date", new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
|
||||
JsonParser.compose(j, f, true);
|
||||
|
||||
NpmPackage pi = minimalMemory ? NpmPackage.fromFolderMinimal(path) : NpmPackage.fromFolder(path);
|
||||
return pi;
|
||||
return minimalMemory ? NpmPackage.fromFolderMinimal(path, false) : NpmPackage.fromFolder(path, false);
|
||||
}
|
||||
|
||||
private void clearCache() throws IOException {
|
||||
for (File f : cacheFolder.listFiles()) {
|
||||
for (File f : Objects.requireNonNull(cacheFolder.listFiles())) {
|
||||
if (f.isDirectory()) {
|
||||
new FilesystemPackageCacheLock(cacheFolder, f.getName()).doWriteWithLock(() -> {
|
||||
Utilities.clearDirectory(f.getAbsolutePath());
|
||||
Utilities.clearDirectory(f.getAbsolutePath());
|
||||
try {
|
||||
FileUtils.deleteDirectory(f);
|
||||
} catch (Exception e1) {
|
||||
try {
|
||||
FileUtils.deleteDirectory(f);
|
||||
} catch (Exception e1) {
|
||||
try {
|
||||
FileUtils.deleteDirectory(f);
|
||||
} catch (Exception e2) {
|
||||
// just give up
|
||||
}
|
||||
} catch (Exception e2) {
|
||||
// just give up
|
||||
}
|
||||
return null; // must return something
|
||||
});
|
||||
} else if (!f.getName().equals("packages.ini"))
|
||||
}
|
||||
|
||||
} else if (!f.getName().equals("packages.ini")) {
|
||||
FileUtils.forceDelete(f);
|
||||
}
|
||||
|
||||
}
|
||||
IniFile ini = new IniFile(Utilities.path(cacheFolder, "packages.ini"));
|
||||
ini.removeSection("packages");
|
||||
ini.save();
|
||||
}
|
||||
|
||||
private void createIniFile() throws IOException {
|
||||
IniFile ini = new IniFile(Utilities.path(cacheFolder, "packages.ini"));
|
||||
boolean save = false;
|
||||
String v = ini.getStringProperty("cache", "version");
|
||||
if (!CACHE_VERSION.equals(v)) {
|
||||
clearCache();
|
||||
ini.setStringProperty("cache", "version", CACHE_VERSION, null);
|
||||
ini.save();
|
||||
}
|
||||
IniFile ini = new IniFile(getPackagesIniPath());
|
||||
ini.setStringProperty("cache", "version", CACHE_VERSION, null);
|
||||
ini.save();
|
||||
}
|
||||
|
||||
private String getPackagesIniPath() throws IOException {
|
||||
return Utilities.path(cacheFolder, "packages.ini");
|
||||
}
|
||||
|
||||
private void checkValidVersionString(String version, String id) {
|
||||
|
@ -316,23 +315,13 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
}
|
||||
}
|
||||
|
||||
private void listSpecs(Map<String, String> specList, PackageServer server) throws IOException {
|
||||
PackageClient pc = new PackageClient(server);
|
||||
List<PackageInfo> matches = pc.search(null, null, null, false);
|
||||
for (PackageInfo m : matches) {
|
||||
if (!specList.containsKey(m.getId())) {
|
||||
specList.put(m.getId(), m.getUrl());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected InputStreamWithSrc loadFromPackageServer(String id, String version) {
|
||||
InputStreamWithSrc retVal = super.loadFromPackageServer(id, version);
|
||||
if (retVal != null) {
|
||||
return retVal;
|
||||
}
|
||||
|
||||
retVal = super.loadFromPackageServer(id, VersionUtilities.getMajMin(version)+".x");
|
||||
retVal = super.loadFromPackageServer(id, VersionUtilities.getMajMin(version) + ".x");
|
||||
if (retVal != null) {
|
||||
return retVal;
|
||||
}
|
||||
|
@ -344,7 +333,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
public String getLatestVersion(String id) throws IOException {
|
||||
for (PackageServer nextPackageServer : getPackageServers()) {
|
||||
// special case:
|
||||
if (!(Utilities.existsInList(id,CommonPackages.ID_PUBPACK, "hl7.terminology.r5") && PackageServer.PRIMARY_SERVER.equals(nextPackageServer.getUrl()))) {
|
||||
if (!(Utilities.existsInList(id, CommonPackages.ID_PUBPACK, "hl7.terminology.r5") && PackageServer.PRIMARY_SERVER.equals(nextPackageServer.getUrl()))) {
|
||||
PackageClient pc = new PackageClient(nextPackageServer);
|
||||
try {
|
||||
return pc.getLatestVersion(id);
|
||||
|
@ -356,7 +345,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
try {
|
||||
return fetchVersionTheOldWay(id);
|
||||
} catch (Exception e) {
|
||||
ourLog.info("Failed to determine latest version of package {} from server: {}", id, "build.fhir.org");
|
||||
ourLog.info("Failed to determine latest version of package {} from server: {}", id, "build.fhir.org");
|
||||
}
|
||||
// still here? use the latest version we previously found or at least, is in the cache
|
||||
|
||||
|
@ -364,7 +353,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
if (version != null) {
|
||||
return version;
|
||||
}
|
||||
throw new FHIRException("Unable to find the last version for package "+id+": no local copy, and no network access");
|
||||
throw new FHIRException("Unable to find the last version for package " + id + ": no local copy, and no network access");
|
||||
}
|
||||
|
||||
public String getLatestVersionFromCache(String id) throws IOException {
|
||||
|
@ -372,8 +361,8 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
File cf = ManagedFileAccess.file(Utilities.path(cacheFolder, f));
|
||||
if (cf.isDirectory()) {
|
||||
if (f.startsWith(id + "#")) {
|
||||
String ver = f.substring(f.indexOf("#")+1);
|
||||
ourLog.info("Latest version of package {} found locally is {} - using that", id, ver);
|
||||
String ver = f.substring(f.indexOf("#") + 1);
|
||||
ourLog.info("Latest version of package {} found locally is {} - using that", id, ver);
|
||||
return ver;
|
||||
}
|
||||
}
|
||||
|
@ -399,10 +388,13 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
/**
|
||||
* Clear the cache
|
||||
*
|
||||
* @throws IOException
|
||||
* @throws IOException If the cache cannot be cleared
|
||||
*/
|
||||
public void clear() throws IOException {
|
||||
clearCache();
|
||||
this.locks.getCacheLock().doWriteWithLock(() -> {
|
||||
clearCache();
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
// ========================= Utilities ============================================================================
|
||||
|
@ -410,38 +402,39 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
/**
|
||||
* Remove a particular package from the cache
|
||||
*
|
||||
* @param id
|
||||
* @param ver
|
||||
* @throws IOException
|
||||
* @param id The id of the package to remove
|
||||
* @param version The literal version of the package to remove. Values such as 'current' and 'dev' are not allowed.
|
||||
* @throws IOException If the package cannot be removed
|
||||
*/
|
||||
public void removePackage(String id, String ver) throws IOException {
|
||||
new FilesystemPackageCacheLock(cacheFolder, id + "#" + ver).doWriteWithLock(() -> {
|
||||
String f = Utilities.path(cacheFolder, id + "#" + ver);
|
||||
public void removePackage(String id, String version) throws IOException {
|
||||
locks.getPackageLock(id + "#" + version).doWriteWithLock(() -> {
|
||||
|
||||
String f = Utilities.path(cacheFolder, id + "#" + version);
|
||||
File ff = ManagedFileAccess.file(f);
|
||||
if (ff.exists()) {
|
||||
Utilities.clearDirectory(f);
|
||||
IniFile ini = new IniFile(Utilities.path(cacheFolder, "packages.ini"));
|
||||
ini.removeProperty("packages", id + "#" + ver);
|
||||
ini.save();
|
||||
ff.delete();
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the identified package from the cache - if it exists
|
||||
* <p>
|
||||
* <p/>
|
||||
* This is for special purpose only (testing, control over speed of loading).
|
||||
* <p/>
|
||||
* Generally, use the loadPackage method
|
||||
*
|
||||
* @param id
|
||||
* @param version
|
||||
* @return
|
||||
* @throws IOException
|
||||
* @param id The id of the package to load
|
||||
* @param version The version of the package to load. Values such as 'current' and 'dev' are allowed.
|
||||
* @return The package, or null if it is not found
|
||||
* @throws IOException If the package cannot be loaded
|
||||
*/
|
||||
@Override
|
||||
public NpmPackage loadPackageFromCacheOnly(String id, String version) throws IOException {
|
||||
|
||||
if (!Utilities.noString(version) && version.startsWith("file:")) {
|
||||
return loadPackageFromFile(id, version.substring(5));
|
||||
}
|
||||
|
@ -454,110 +447,144 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
return p;
|
||||
}
|
||||
}
|
||||
String foundPackage = null;
|
||||
String foundVersion = null;
|
||||
for (String f : Utilities.reverseSorted(cacheFolder.list())) {
|
||||
File cf = ManagedFileAccess.file(Utilities.path(cacheFolder, f));
|
||||
if (cf.isDirectory()) {
|
||||
if (f.equals(id + "#" + version) || (Utilities.noString(version) && f.startsWith(id + "#"))) {
|
||||
return loadPackageInfo(Utilities.path(cacheFolder, f));
|
||||
|
||||
String foundPackageFolder = findPackageFolder(id, version);
|
||||
if (foundPackageFolder != null) {
|
||||
NpmPackage foundPackage = locks.getPackageLock(foundPackageFolder).doReadWithLock(() -> {
|
||||
String path = Utilities.path(cacheFolder, foundPackageFolder);
|
||||
File directory = ManagedFileAccess.file(path);
|
||||
|
||||
/* Check if the directory still exists now that we have a read lock. findPackageFolder does no locking in order
|
||||
to avoid locking every potential package directory, so it's possible that a package deletion has occurred.
|
||||
* */
|
||||
if (!directory.exists()) {
|
||||
return null;
|
||||
}
|
||||
if (version != null && !version.equals("current") && (version.endsWith(".x") || Utilities.charCount(version, '.') < 2) && f.contains("#")) {
|
||||
String[] parts = f.split("#");
|
||||
if (parts[0].equals(id) && VersionUtilities.isMajMinOrLaterPatch((foundVersion!=null ? foundVersion : version),parts[1])) {
|
||||
foundVersion = parts[1];
|
||||
foundPackage = f;
|
||||
return loadPackageInfo(path);
|
||||
});
|
||||
if (foundPackage != null) {
|
||||
if (foundPackage.isIndexed()){
|
||||
return foundPackage;
|
||||
} else {
|
||||
return locks.getPackageLock(foundPackageFolder).doWriteWithLock(() -> {
|
||||
File directory = ManagedFileAccess.file(foundPackage.getPath());
|
||||
|
||||
/* Check if the directory still exists now that we have a write lock. findPackageFolder does no locking in order
|
||||
to avoid locking every potential package directory, so it's possible that a package deletion has occurred.
|
||||
* */
|
||||
if (!directory.exists()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Since another thread may have already indexed the package since our read, we need to check again
|
||||
NpmPackage output = loadPackageInfo(foundPackage.getPath());
|
||||
if (output.isIndexed()) {
|
||||
return output;
|
||||
}
|
||||
String path = Utilities.path(cacheFolder, foundPackageFolder);
|
||||
output.checkIndexed(path);
|
||||
return output;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (foundPackage!=null) {
|
||||
return loadPackageInfo(Utilities.path(cacheFolder, foundPackage));
|
||||
}
|
||||
if ("dev".equals(version))
|
||||
return loadPackageFromCacheOnly(id, "current");
|
||||
else
|
||||
return null;
|
||||
}
|
||||
|
||||
private String findPackageFolder(String id, String version) throws IOException {
|
||||
String foundPackageFolder = null;
|
||||
String foundVersion = null;
|
||||
for (String currentPackageFolder : Utilities.reverseSorted(cacheFolder.list())) {
|
||||
File cf = ManagedFileAccess.file(Utilities.path(cacheFolder, currentPackageFolder));
|
||||
if (cf.isDirectory()) {
|
||||
if (currentPackageFolder.equals(id + "#" + version) || (Utilities.noString(version) && currentPackageFolder.startsWith(id + "#"))) {
|
||||
return currentPackageFolder;
|
||||
}
|
||||
if (version != null && !version.equals("current") && (version.endsWith(".x") || Utilities.charCount(version, '.') < 2) && currentPackageFolder.contains("#")) {
|
||||
String[] parts = currentPackageFolder.split("#");
|
||||
if (parts[0].equals(id) && VersionUtilities.isMajMinOrLaterPatch((foundVersion != null ? foundVersion : version), parts[1])) {
|
||||
foundVersion = parts[1];
|
||||
foundPackageFolder = currentPackageFolder;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return foundPackageFolder;
|
||||
}
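findPackageFolder above looks for a cache directory named id#version, and when the requested version is partial (for example "4.0.x" or "4.0") it keeps the highest matching patch it sees. A simplified sketch of that folder-name matching over plain strings; VersionUtilities.isMajMinOrLaterPatch is replaced here by a naive prefix check and lexicographic ordering:

  import java.util.Comparator;
  import java.util.List;

  class PackageFolderMatchSketch {
    // Picks the cache folder for id#version; a version like "4.0.x" or "4.0" matches any 4.0.* patch.
    static String find(List<String> folders, String id, String version) {
      String exact = id + "#" + version;
      String wildcardPrefix = id + "#" + version.replace(".x", "") + ".";
      return folders.stream()
          .filter(f -> f.equals(exact) || f.startsWith(wildcardPrefix))
          .max(Comparator.naturalOrder()) // highest patch wins (naive; the real code uses VersionUtilities)
          .orElse(null);
    }

    public static void main(String[] args) {
      List<String> folders = List.of("hl7.fhir.r4.core#4.0.0", "hl7.fhir.r4.core#4.0.1");
      System.out.println(find(folders, "hl7.fhir.r4.core", "4.0.x")); // hl7.fhir.r4.core#4.0.1
      System.out.println(find(folders, "hl7.fhir.r4.core", "4.0.0")); // hl7.fhir.r4.core#4.0.0
    }
  }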
|
||||
|
||||
/**
|
||||
* Add an already fetched package to the cache
|
||||
*/
|
||||
@Override
|
||||
public NpmPackage addPackageToCache(String id, String version, InputStream packageTgzInputStream, String sourceDesc) throws IOException {
|
||||
public NpmPackage addPackageToCache(final String id, final String version, final InputStream packageTgzInputStream, final String sourceDesc) throws IOException {
|
||||
checkValidVersionString(version, id);
|
||||
|
||||
String uuid = UUID.randomUUID().toString().toLowerCase();
|
||||
String tempDir = Utilities.path(cacheFolder, uuid);
|
||||
NpmPackage npm = NpmPackage.extractFromTgz(packageTgzInputStream, sourceDesc, tempDir, minimalMemory);
|
||||
return locks.getPackageLock(id + "#" + version).doWriteWithLock(() -> {
|
||||
String uuid = UUID.randomUUID().toString().toLowerCase();
|
||||
String tempDir = Utilities.path(cacheFolder, uuid);
|
||||
|
||||
NpmPackage npm = NpmPackage.extractFromTgz(packageTgzInputStream, sourceDesc, tempDir, minimalMemory);
|
||||
|
||||
if (progress) {
|
||||
log("");
|
||||
logn("Installing "+id+"#"+version);
|
||||
}
|
||||
|
||||
if ((npm.name() != null && id != null && !id.equalsIgnoreCase(npm.name()))) {
|
||||
if (!suppressErrors && (!id.equals("hl7.fhir.r5.core") && !id.equals("hl7.fhir.us.immds"))) {// temporary work around
|
||||
throw new IOException("Attempt to import a mis-identified package. Expected " + id + ", got " + npm.name());
|
||||
}
|
||||
}
|
||||
if (version == null) {
|
||||
version = npm.version();
|
||||
}
|
||||
log("Installing " + id + "#" + version);
|
||||
|
||||
String v = version;
|
||||
return new FilesystemPackageCacheLock(cacheFolder, id + "#" + version).doWriteWithLock(() -> {
|
||||
NpmPackage pck = null;
|
||||
String packRoot = Utilities.path(cacheFolder, id + "#" + v);
|
||||
if ((npm.name() != null && id != null && !id.equalsIgnoreCase(npm.name()))) {
|
||||
if (!suppressErrors && (!id.equals("hl7.fhir.r5.core") && !id.equals("hl7.fhir.us.immds"))) {// temporary work around
|
||||
throw new IOException("Attempt to import a mis-identified package. Expected " + id + ", got " + npm.name());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
NpmPackage npmPackage = null;
|
||||
String packageRoot = Utilities.path(cacheFolder, id + "#" + version);
|
||||
try {
|
||||
// ok, now we have a lock on it... check if something created it while we were waiting
|
||||
if (!ManagedFileAccess.file(packRoot).exists() || Utilities.existsInList(v, "current", "dev")) {
|
||||
Utilities.createDirectory(packRoot);
|
||||
if (!ManagedFileAccess.file(packageRoot).exists() || Utilities.existsInList(version, "current", "dev")) {
|
||||
Utilities.createDirectory(packageRoot);
|
||||
try {
|
||||
Utilities.clearDirectory(packRoot);
|
||||
Utilities.clearDirectory(packageRoot);
|
||||
} catch (Throwable t) {
|
||||
log("Unable to clear directory: "+packRoot+": "+t.getMessage()+" - this may cause problems later");
|
||||
log("Unable to clear directory: " + packageRoot + ": " + t.getMessage() + " - this may cause problems later");
|
||||
}
|
||||
Utilities.renameDirectory(tempDir, packRoot);
|
||||
Utilities.renameDirectory(tempDir, packageRoot);
|
||||
|
||||
IniFile ini = new IniFile(Utilities.path(cacheFolder, "packages.ini"));
|
||||
ini.setTimeStampFormat(INI_TIMESTAMP_FORMAT);
|
||||
ini.setTimestampProperty("packages", id + "#" + v, ZonedDateTime.now(), null);
|
||||
ini.setIntegerProperty("package-sizes", id + "#" + v, npm.getSize(), null);
|
||||
ini.save();
|
||||
if (progress)
|
||||
log(" done.");
|
||||
log(" done.");
|
||||
} else {
|
||||
Utilities.clearDirectory(tempDir);
|
||||
ManagedFileAccess.file(tempDir).delete();
|
||||
}
|
||||
if (!id.equals(npm.getNpm().asString("name")) || !v.equals(npm.getNpm().asString("version"))) {
|
||||
if (!id.equals(npm.getNpm().asString("name")) || !version.equals(npm.getNpm().asString("version"))) {
|
||||
if (!id.equals(npm.getNpm().asString("name"))) {
|
||||
npm.getNpm().add("original-name", npm.getNpm().asString("name"));
|
||||
npm.getNpm().remove("name");
|
||||
npm.getNpm().add("name", id);
|
||||
}
|
||||
if (!v.equals(npm.getNpm().asString("version"))) {
|
||||
if (!version.equals(npm.getNpm().asString("version"))) {
|
||||
npm.getNpm().add("original-version", npm.getNpm().asString("version"));
|
||||
npm.getNpm().remove("version");
|
||||
npm.getNpm().add("version", v);
|
||||
npm.getNpm().add("version", version);
|
||||
}
|
||||
TextFile.stringToFile(JsonParser.compose(npm.getNpm(), true), Utilities.path(cacheFolder, id + "#" + v, "package", "package.json"));
|
||||
TextFile.stringToFile(JsonParser.compose(npm.getNpm(), true), Utilities.path(cacheFolder, id + "#" + version, "package", "package.json"));
|
||||
}
|
||||
npmPackage = loadPackageInfo(packageRoot);
|
||||
if (npmPackage != null && !npmPackage.isIndexed()) {
|
||||
npmPackage.checkIndexed(packageRoot);
|
||||
}
|
||||
pck = loadPackageInfo(packRoot);
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
// don't leave a half extracted package behind
|
||||
log("Clean up package " + packRoot + " because installation failed: " + e.getMessage());
|
||||
log("Clean up package " + packageRoot + " because installation failed: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
Utilities.clearDirectory(packRoot);
|
||||
ManagedFileAccess.file(packRoot).delete();
|
||||
} catch (Exception ei) {
|
||||
Utilities.clearDirectory(packageRoot);
|
||||
ManagedFileAccess.file(packageRoot).delete();
|
||||
} catch (Exception ignored) {
|
||||
// nothing
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
return pck;
|
||||
return npmPackage;
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -567,12 +594,6 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
}
|
||||
}
|
||||
|
||||
private void logn(String s) {
|
||||
if (!silent) {
|
||||
System.out.print(s);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPackageUrl(String packageId) throws IOException {
|
||||
String result = super.getPackageUrl(packageId);
|
||||
|
@ -583,14 +604,27 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
return result;
|
||||
}
|
||||
|
||||
public void listAllIds(Map<String, String> specList) throws IOException {
|
||||
for (NpmPackage p : temporaryPackages) {
|
||||
specList.put(p.name(), p.canonical());
|
||||
/**
|
||||
* do not use this in minimal memory mode
|
||||
* @param packagesFolder
|
||||
* @throws IOException
|
||||
*/
|
||||
public void loadFromFolder(String packagesFolder) throws IOException {
|
||||
assert !minimalMemory;
|
||||
|
||||
File[] files = ManagedFileAccess.file(packagesFolder).listFiles();
|
||||
if (files != null) {
|
||||
for (File f : files) {
|
||||
if (f.getName().endsWith(".tgz")) {
|
||||
FileInputStream fs = ManagedFileAccess.inStream(f);
|
||||
try {
|
||||
temporaryPackages.add(NpmPackage.fromPackage(fs));
|
||||
} finally {
|
||||
fs.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (PackageServer next : getPackageServers()) {
|
||||
listSpecs(specList, next);
|
||||
}
|
||||
addCIBuildSpecs(specList);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -601,7 +635,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
}
|
||||
|
||||
if (version == null && id.contains("#")) {
|
||||
version = id.substring(id.indexOf("#")+1);
|
||||
version = id.substring(id.indexOf("#") + 1);
|
||||
id = id.substring(0, id.indexOf("#"));
|
||||
}
|
||||
|
||||
|
@ -629,10 +663,8 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
version = "current";
|
||||
}
|
||||
|
||||
if (progress) {
|
||||
log("Installing " + id + "#" + (version == null ? "?" : version) + " to the package cache");
|
||||
log(" Fetching:");
|
||||
}
|
||||
log("Installing " + id + "#" + (version == null ? "?" : version) + " to the package cache");
|
||||
log(" Fetching:");
|
||||
|
||||
// nup, don't have it locally (or it's expired)
|
||||
FilesystemPackageCacheManager.InputStreamWithSrc source;
|
||||
|
@ -640,14 +672,14 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
source = packageProvider.provide(id, version);
|
||||
} else if (Utilities.isAbsoluteUrl(version)) {
|
||||
source = fetchSourceFromUrlSpecific(version);
|
||||
} else if ("current".equals(version) || (version!= null && version.startsWith("current$"))) {
|
||||
} else if ("current".equals(version) || (version != null && version.startsWith("current$"))) {
|
||||
// special case - fetch from ci-build server
|
||||
source = loadFromCIBuild(id, version.startsWith("current$") ? version.substring(8) : null);
|
||||
} else {
|
||||
source = loadFromPackageServer(id, version);
|
||||
}
|
||||
if (source == null) {
|
||||
throw new FHIRException("Unable to find package "+id+"#"+version);
|
||||
throw new FHIRException("Unable to find package " + id + "#" + version);
|
||||
}
|
||||
return addPackageToCache(id, source.version, source.stream, source.url);
|
||||
}
|
||||
|
@ -665,7 +697,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
if (optional)
|
||||
return null;
|
||||
else
|
||||
throw new FHIRException("Unable to fetch: "+e.getMessage(), e);
|
||||
throw new FHIRException("Unable to fetch: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -677,18 +709,18 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
|
|||
try {
|
||||
stream = fetchFromUrlSpecific(Utilities.pathURL(ciList.get(id), "package.tgz"), false);
|
||||
} catch (Exception e) {
|
||||
stream = fetchFromUrlSpecific(Utilities.pathURL(ciList.get(id), "branches", "main", "package.tgz"), false);
|
||||
stream = fetchFromUrlSpecific(Utilities.pathURL(ciList.get(id), "branches", "main", "package.tgz"), false);
|
||||
}
|
||||
return new InputStreamWithSrc(stream, Utilities.pathURL(ciList.get(id), "package.tgz"), "current");
} else {
InputStream stream = fetchFromUrlSpecific(Utilities.pathURL(ciList.get(id), "branches", branch, "package.tgz"), false);
return new InputStreamWithSrc(stream, Utilities.pathURL(ciList.get(id), "branches", branch, "package.tgz"), "current$"+branch);
return new InputStreamWithSrc(stream, Utilities.pathURL(ciList.get(id), "branches", branch, "package.tgz"), "current$" + branch);
}
} else if (id.startsWith("hl7.fhir.r6")) {
InputStream stream = fetchFromUrlSpecific(Utilities.pathURL("http://build.fhir.org", id + ".tgz"), false);
return new InputStreamWithSrc(stream, Utilities.pathURL("http://build.fhir.org", id + ".tgz"), "current");
InputStream stream = fetchFromUrlSpecific(Utilities.pathURL("https://build.fhir.org", id + ".tgz"), false);
return new InputStreamWithSrc(stream, Utilities.pathURL("https://build.fhir.org", id + ".tgz"), "current");
} else {
throw new FHIRException("The package '" + id + "' has no entry on the current build server ("+ciList.toString()+")");
throw new FHIRException("The package '" + id + "' has no entry on the current build server (" + ciList + ")");
}
}

@@ -702,24 +734,14 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
return null;
}

private void addCIBuildSpecs(Map<String, String> specList) throws IOException {
checkBuildLoaded();
for (JsonElement n : buildInfo) {
JsonObject o = (JsonObject) n;
if (!specList.containsKey(o.asString("package-id"))) {
specList.put(o.asString("package-id"), o.asString("url"));
}
}
}

@Override
public String getPackageId(String canonicalUrl) throws IOException {
String retVal = findCanonicalInLocalCache(canonicalUrl);

if(retVal == null) {

if (retVal == null) {
retVal = super.getPackageId(canonicalUrl);
}

if (retVal == null) {
retVal = getPackageIdFromBuildList(canonicalUrl);
}

@@ -745,7 +767,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple

// ========================= Package Mgmt API =======================================================================

private String getPackageIdFromBuildList(String canonical) throws IOException {
private String getPackageIdFromBuildList(String canonical) {
if (canonical == null) {
return null;
}

@@ -767,7 +789,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
return null;
}

private NpmPackage checkCurrency(String id, NpmPackage p) throws IOException {
private NpmPackage checkCurrency(String id, NpmPackage p) {
checkBuildLoaded();
// special case: current versions roll over, and we have to check their currency
try {

@@ -779,7 +801,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
return null; // nup, we need a new copy
}
} catch (Exception e) {
log("Unable to check package currency: "+id+": "+id);
log("Unable to check package currency: " + id + ": " + id);
}
return p;
}

@@ -795,7 +817,6 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
loadFromBuildServer();
} catch (Exception e2) {
log("Error connecting to build server - running without build (" + e2.getMessage() + ")");
// e.printStackTrace();
}
}
}

@@ -825,7 +846,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
ciList.put(bld.getPackageId(), "https://build.fhir.org/ig/" + bld.getRepo());
}
}
buildLoaded = true;
buildLoaded = true;
}

private String getRepo(String path) {

@@ -849,8 +870,8 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
if (url == null) {
try {
url = getPackageUrlFromBuildList(id);
} catch (Exception e) {
url = null;
} catch (Exception ignored) {

}
}
if (url == null) {

@@ -860,16 +881,14 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
url = url.substring(0, url.indexOf("/ImplementationGuide/"));
}
String pu = Utilities.pathURL(url, "package-list.json");
String aurl = pu;

PackageList pl;
try {
pl = PackageList.fromUrl(pu);
} catch (Exception e) {
String pv = Utilities.pathURL(url, v, "package.tgz");
try {
aurl = pv;
InputStreamWithSrc src = new InputStreamWithSrc(fetchFromUrlSpecific(pv, false), pv, v);
return src;
return new InputStreamWithSrc(fetchFromUrlSpecific(pv, false), pv, v);
} catch (Exception e1) {
throw new FHIRException("Error fetching package directly (" + pv + "), or fetching package list for " + id + " from " + pu + ": " + e1.getMessage(), e1);
}

@@ -878,7 +897,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
throw new FHIRException("Package ids do not match in " + pu + ": " + id + " vs " + pl.pid());
for (PackageListEntry vo : pl.versions()) {
if (v.equals(vo.version())) {
aurl = Utilities.pathURL(vo.path(), "package.tgz");

String u = Utilities.pathURL(vo.path(), "package.tgz");
return new InputStreamWithSrc(fetchFromUrlSpecific(u, true), u, v);
}

@@ -916,7 +935,7 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple

private String getUrlForPackage(String id) {
if (CommonPackages.ID_XVER.equals(id)) {
return "http://fhir.org/packages/hl7.fhir.xver-extensions";
return "https://fhir.org/packages/hl7.fhir.xver-extensions";
}
return null;
}

@@ -931,18 +950,6 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
return res;
}

/**
 * if you don't provide and implementation of this interface, the PackageCacheManager will use the web directly.
 * <p>
 * You can use this interface to
 *
 * @author graha
 */
public interface INetworkServices {

InputStream resolvePackage(String packageId, String version);
}

public interface CacheLockFunction<T> {
T get() throws IOException;
}

@@ -957,10 +964,10 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple

public class BuildRecord {

private String url;
private String packageId;
private String repo;
private Date date;
private final String url;
private final String packageId;
private final String repo;
private final Date date;

public BuildRecord(String url, String packageId, String repo, Date date) {
super();

@@ -986,47 +993,6 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
return date;
}

}

public class VersionHistory {
private String id;
private String canonical;
private String current;
private Map<String, String> versions = new HashMap<>();

public String getCanonical() {
return canonical;
}

public String getCurrent() {
return current;
}

public Map<String, String> getVersions() {
return versions;
}

public String getId() {
return id;
}
}

public class PackageEntry {

private byte[] bytes;
private String name;

public PackageEntry(String name) {
this.name = name;
}

public PackageEntry(String name, byte[] bytes) {
this.name = name;
this.bytes = bytes;
}
}

public boolean packageExists(String id, String ver) throws IOException {

@@ -1078,5 +1044,5 @@ public class FilesystemPackageCacheManager extends BasePackageCacheManager imple
FilesystemPackageCacheManager.packageProvider = packageProvider;
}

}
@@ -0,0 +1,213 @@
package org.hl7.fhir.utilities.npm;

import lombok.Getter;
import org.hl7.fhir.utilities.TextFile;
import org.hl7.fhir.utilities.Utilities;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class FilesystemPackageCacheManagerLocks {

private static final ConcurrentHashMap<File, FilesystemPackageCacheManagerLocks> cacheFolderLockManagers = new ConcurrentHashMap<>();

@Getter
private final CacheLock cacheLock = new CacheLock();

private final ConcurrentHashMap<File, PackageLock> packageLocks = new ConcurrentHashMap<>();

private final File cacheFolder;

private final Long lockTimeoutTime;

private final TimeUnit lockTimeoutTimeUnit;

/**
 * This method is intended to be used only for testing purposes.
 * <p/>
 * To ensure that only one instance of the FilesystemPackageCacheManagerLocks is created for a given cacheFolder, use
 * the static org.hl7.fhir.utilities.npm.FilesystemPackageCacheManagerLocks#getFilesystemPackageCacheManagerLocks(java.io.File) method.
 * <p/>
 * Get all the locks necessary to manage a filesystem cache.
 *
 * @param cacheFolder
 * @throws IOException
 */
public FilesystemPackageCacheManagerLocks(File cacheFolder) throws IOException {
this(cacheFolder, 60L, TimeUnit.SECONDS);
}

private FilesystemPackageCacheManagerLocks(File cacheFolder, Long lockTimeoutTime, TimeUnit lockTimeoutTimeUnit) throws IOException {
this.cacheFolder = cacheFolder;
this.lockTimeoutTime = lockTimeoutTime;
this.lockTimeoutTimeUnit = lockTimeoutTimeUnit;
}

/**
 * This method is intended to be used only for testing purposes.
 */
protected FilesystemPackageCacheManagerLocks withLockTimeout(Long lockTimeoutTime, TimeUnit lockTimeoutTimeUnit) throws IOException {
return new FilesystemPackageCacheManagerLocks(cacheFolder, lockTimeoutTime, lockTimeoutTimeUnit);
}

/**
 * Returns a single FilesystemPackageCacheManagerLocks instance for the given cacheFolder.
 * <p/>
 * If an instance already exists, it is returned. Otherwise, a new instance is created.
 * <p/>
 * Using this method ensures that only one instance of FilesystemPackageCacheManagerLocks is created for a given
 * cacheFolder, which is useful if multiple ValidationEngine instances are running in parallel.
 *
 * @param cacheFolder
 * @return
 * @throws IOException
 */
public static FilesystemPackageCacheManagerLocks getFilesystemPackageCacheManagerLocks(File cacheFolder) throws IOException {
return cacheFolderLockManagers.computeIfAbsent(cacheFolder, k -> {
try {
return new FilesystemPackageCacheManagerLocks(k);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}

public class CacheLock {
private final ReadWriteLock lock;

protected CacheLock() {
lock = new ReentrantReadWriteLock();
}

public ReadWriteLock getLock() {
return lock;
}

public <T> T doWriteWithLock(FilesystemPackageCacheManager.CacheLockFunction<T> f) throws IOException {
lock.writeLock().lock();
T result = null;
try {
result = f.get();
} finally {
lock.writeLock().unlock();
}
return result;
}
}

public class PackageLock {
@Getter
private final File lockFile;
private final ReadWriteLock lock;

protected PackageLock(File lockFile, ReadWriteLock lock) {
this.lockFile = lockFile;
this.lock = lock;
}

private void checkForLockFileWaitForDeleteIfExists(File lockFile) throws IOException {
if (!lockFile.exists()) {
return;
}
try (WatchService watchService = FileSystems.getDefault().newWatchService()) {
Path dir = lockFile.getParentFile().toPath();
dir.register(watchService, StandardWatchEventKinds.ENTRY_DELETE);

WatchKey key = watchService.poll(lockTimeoutTime, lockTimeoutTimeUnit);
if (key == null) {
// It is possible that the lock file is deleted before the watch service is registered, so if we timeout at
// this point, we should check if the lock file still exists.
if (lockFile.exists()) {
throw new TimeoutException("Timeout waiting for lock file deletion: " + lockFile.getName());
}
} else {
for (WatchEvent<?> event : key.pollEvents()) {
WatchEvent.Kind<?> kind = event.kind();
if (kind == StandardWatchEventKinds.ENTRY_DELETE) {
Path deletedFilePath = (Path) event.context();
if (deletedFilePath.toString().equals(lockFile.getName())) {
return;
}
}
key.reset();
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("Error reading package.", e);
} catch (TimeoutException e) {
throw new IOException("Error reading package.", e);
}
}

public <T> T doReadWithLock(FilesystemPackageCacheManager.CacheLockFunction<T> f) throws IOException {
cacheLock.getLock().readLock().lock();
lock.readLock().lock();

checkForLockFileWaitForDeleteIfExists(lockFile);

T result = null;
try {
result = f.get();
} finally {
lock.readLock().unlock();
cacheLock.getLock().readLock().unlock();
}
return result;
}

public <T> T doWriteWithLock(FilesystemPackageCacheManager.CacheLockFunction<T> f) throws IOException {
cacheLock.getLock().writeLock().lock();
lock.writeLock().lock();

if (!lockFile.isFile()) {
try {
TextFile.stringToFile("", lockFile);
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
try (FileChannel channel = new RandomAccessFile(lockFile, "rw").getChannel()) {
FileLock fileLock = null;
while (fileLock == null) {
fileLock = channel.tryLock(0, Long.MAX_VALUE, true);
if (fileLock == null) {
Thread.sleep(100); // Wait and retry
}
}
T result = null;
try {
result = f.get();
} finally {
fileLock.release();
channel.close();
if (!lockFile.delete()) {
lockFile.deleteOnExit();
}
lock.writeLock().unlock();
cacheLock.getLock().writeLock().unlock();
}
return result;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("Thread interrupted while waiting for lock", e);
}
}
}

public synchronized PackageLock getPackageLock(String packageName) throws IOException {
File lockFile = new File(Utilities.path(cacheFolder.getAbsolutePath(), packageName + ".lock"));
return packageLocks.computeIfAbsent(lockFile, (k) -> new PackageLock(k, new ReentrantReadWriteLock()));
}
}
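For orientation, here is a minimal sketch of how this lock manager might be called by a package cache implementation. The cache folder, package id, and callback bodies are illustrative assumptions (not part of this commit), and the fragment is assumed to sit inside a method that declares throws IOException:

    // Illustrative only: one shared lock manager per cache folder, one PackageLock per package.
    File cacheFolder = new File(System.getProperty("user.home"), ".fhir/packages");
    FilesystemPackageCacheManagerLocks locks =
        FilesystemPackageCacheManagerLocks.getFilesystemPackageCacheManagerLocks(cacheFolder);
    FilesystemPackageCacheManagerLocks.PackageLock packageLock = locks.getPackageLock("example.fhir.uv.myig#1.2.3");

    // A writer holds the package's write lock plus an on-disk .lock file; readers in other
    // processes wait (via a WatchService) until that file is deleted.
    packageLock.doWriteWithLock(() -> {
      // ... unpack the downloaded package into the cache here (hypothetical) ...
      return null;
    });

    packageLock.doReadWithLock(() -> {
      // ... read the cached package here (hypothetical) ...
      return null;
    });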
@@ -357,9 +357,15 @@ public class NpmPackage {
 * Factory method that parses a package from an extracted folder
 */
public static NpmPackage fromFolder(String path) throws IOException {
return fromFolder(path, true);
}

public static NpmPackage fromFolder(String path, boolean checkIndexed) throws IOException {
NpmPackage res = new NpmPackage();
res.loadFiles(path, ManagedFileAccess.file(path));
res.checkIndexed(path);
if (checkIndexed) {
res.checkIndexed(path);
}
return res;
}

@@ -367,10 +373,15 @@ public class NpmPackage {
 * Factory method that parses a package from an extracted folder
 */
public static NpmPackage fromFolderMinimal(String path) throws IOException {
return fromFolderMinimal(path, true);
}

public static NpmPackage fromFolderMinimal(String path, boolean checkIndexed) throws IOException {
NpmPackage res = new NpmPackage();
res.minimalMemory = true;
res.loadFiles(path, ManagedFileAccess.file(path));
res.checkIndexed(path);
if (checkIndexed) {
res.checkIndexed(path);}
return res;
}

@@ -616,7 +627,18 @@ public class NpmPackage {
index.content.put(n, data);
}

private void checkIndexed(String desc) throws IOException {
public boolean isIndexed() throws IOException {
for (NpmPackageFolder folder : folders.values()) {
JsonObject index = folder.index();
if (folder.index() == null || index.forceArray("files").size() == 0) {
return false;
}
}
return true;
}

public void checkIndexed(String desc) throws IOException {
for (NpmPackageFolder folder : folders.values()) {
JsonObject index = folder.index();
if (index == null || index.forceArray("files").size() == 0) {

@@ -626,6 +648,7 @@ public class NpmPackage {
}

public void indexFolder(String desc, NpmPackageFolder folder) throws FileNotFoundException, IOException {
List<String> remove = new ArrayList<>();
NpmPackageIndexBuilder indexer = new NpmPackageIndexBuilder();
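As a usage note, a hedged sketch of the new checkIndexed flag on the folder factories (the folder path here is an illustrative assumption):

    // Illustrative only: load an already-extracted package without forcing index generation,
    // then build the .index.json on demand if it is missing.
    NpmPackage pkg = NpmPackage.fromFolder("/tmp/hl7.fhir.us.core", false);
    if (!pkg.isIndexed()) {
      pkg.checkIndexed("/tmp/hl7.fhir.us.core"); // checkIndexed is now public
    }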
@@ -129,7 +129,7 @@ public class NpmPackageIndexBuilder {
}
}
} catch (Exception e) {
System.out.println("Error parsing "+name+": "+e.getMessage());
// System.out.println("Error parsing "+name+": "+e.getMessage());
if (name.contains("openapi")) {
return false;
}
@@ -229,7 +229,7 @@ public class XLSXmlNormaliser {

private void saveXml(FileOutputStream stream) throws TransformerException, IOException {

TransformerFactory factory = TransformerFactory.newInstance();
TransformerFactory factory = XMLUtil.newXXEProtectedTransformerFactory();
Transformer transformer = factory.newTransformer();
Result result = new StreamResult(stream);
Source source = new DOMSource(xml);
@@ -42,6 +42,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

@@ -501,8 +502,16 @@ public class XMLUtil {
return e == null ? null : e.getAttribute(aname);
}

public static void writeDomToFile(Document doc, String filename) throws TransformerException, IOException {
public static TransformerFactory newXXEProtectedTransformerFactory() {
TransformerFactory transformerFactory = TransformerFactory.newInstance();
transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
return transformerFactory;
}

public static void writeDomToFile(Document doc, String filename) throws TransformerException, IOException {
TransformerFactory transformerFactory = XMLUtil.newXXEProtectedTransformerFactory();
Transformer transformer = transformerFactory.newTransformer();
DOMSource source = new DOMSource(doc);
StreamResult streamResult = new StreamResult(ManagedFileAccess.file(filename));

@@ -593,7 +602,7 @@ public class XMLUtil {
}

public static void saveToFile(Element root, OutputStream stream) throws TransformerException {
Transformer transformer = TransformerFactory.newInstance().newTransformer();
Transformer transformer = XMLUtil.newXXEProtectedTransformerFactory().newTransformer();
Result output = new StreamResult(stream);
Source input = new DOMSource(root);
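For context, setting ACCESS_EXTERNAL_DTD and ACCESS_EXTERNAL_STYLESHEET to empty strings stops the transformer from resolving external DTDs and stylesheets, which is the usual JAXP hardening against XXE. A hedged sketch of serializing a DOM tree with the new factory ('doc' is an assumed org.w3c.dom.Document, not from this commit):

    // Illustrative only: serialize a Document to a String using the XXE-protected factory.
    Transformer t = XMLUtil.newXXEProtectedTransformerFactory().newTransformer();
    StringWriter sw = new StringWriter();
    t.transform(new DOMSource(doc), new StreamResult(sw));
    String xml = sw.toString();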
@@ -1137,4 +1137,7 @@ TYPE_SPECIFIC_CHECKS_DT_XHTML_UNKNOWN_HREF = Hyperlink scheme ''{3}'' in ''{0}''
TYPE_SPECIFIC_CHECKS_DT_XHTML_LITERAL_HREF = Hyperlink scheme ''{3}'' in ''{0}'' at ''{1}'' for ''{2}'' is not a valid hyperlinkable scheme
SM_TARGET_TYPE_UNKNOWN = The type of the target variable is not known: {0}
XHTML_XHTML_ATTRIBUTE_XML_SPACE = The attribute 'xml:space' is legal but has a fixed value of 'preserve'. It''s use is discouraged

VALIDATION_HL7_PUBLISHER_MULTIPLE_WGS = This resource has more than workgroup extension (http://hl7.org/fhir/StructureDefinition/structuredefinition-wg)
NO_VALID_DISPLAY_FOUND_NONE_FOR_LANG = Wrong Display Name ''{0}'' for {1}#{2}. There are no valid display names found for language(s) ''{3}''. Default display is ''{4}''
NO_VALID_DISPLAY_AT_ALL = Cannot validate display Name ''{0}'' for {1}#{2}: No displays are known

@@ -74,6 +74,7 @@ CAPABILITY_CORS_YES = Enable CORS: yes
CAPABILITY_CREATE_INT = POST a new resource (create interaction)
GENERAL_CRIT = Criteria
CAPABILITY_DELETE_INT = DELETE a resource (delete interaction)
CAPABILITY_ERR_DET = Error detected
CAPABILITY_EXT_OP = Extended Operations
CAPABILITY_FHIR = Core FHIR Resource
CAPABILITY_FHIR_VER = FHIR Version: {0}

@@ -84,6 +85,8 @@ CAPABILITY_INT = interaction.
CAPABILITY_INTER_SUPP = The interactions supported by each resource (
CAPABILITY_INT_DESC = interaction described as follows:
CAPABILITY_INT_SUMM = Interaction summary
CAPABILITY_MAY_SUPP = MAY Support the Following Implementation Guides
CAPABILITY_MULT_EXT = this mark indicates that there are more than one expectation extensions present
CAPABILITY_NOTE_CAP = Note to Implementers: FHIR Capabilities
CAPABILITY_OP = Operations
CAPABILITY_OPER = Operation

@@ -95,6 +98,7 @@ PARS_SUMMARY_LIST = Parameters: {0}
CAPABILITY_PATCH_INT = PATCH a new resource version (patch interaction)
GENERAL_PROF = Profile
CAPABILITY_PROF_CONF = Profile Conformance
CAPABILITY_PROF_RES_DOC = Document Resource Profile
CAPABILITY_PROF_MAP = Profile Mapping
CAPABILITY_PUB_BY = Published by: {0}
CAPABILITY_PUB_ON = Published on: {0}

@@ -102,6 +106,9 @@ CAPABILITY_READ_INT = GET a resource (read interaction)
CAPABILITY_REF_PROF = Reference Policy
CAPABILITY_REQ_RECOM = Required and recommended search parameters
CAPABILITY_REST_CAPS = FHIR RESTful Capabilities
CAPABILITY_DOCUMENT_CAPS = FHIR Document Capabilities
CAPABILITY_MESSAGING_CAPS = FHIR Messaging Capabilities
CAPABILITY_MESSAGING_CAP = Messaging Capability
CAPABILITY_REST_CONFIG = REST Configuration: {0}
CAPABILITY_RES_CONF = Resource Conformance: {0}
CAPABILITY_RES_ENB = The linked resources enabled for

@@ -122,7 +129,10 @@ CAPABILITY_SUPPS = Supports
CAPABILITY_SUPP_FORM = Supported Formats:
CAPABILITY_SUPP_PATCH_FORM = Supported Patch Formats:
CAPABILITY_SUPP_PROFS = Supported Profiles
CAPABILITY_SUPP_THE = Supports the
CAPABILITY_SUPP_MSGS = Supported Message(s)
CAPABILITY_ENDPOINTS = Endpoint(s)
CAPABILITY_SUPP_THE = support the
CAPABILITY_SUPPS_THE = Supports the
GENERAL_TYPE = Type
CAPABILITY_TYPS = Types
CAPABILITY_TYP_PRES = ype are only present if at least one of the resources has support for them.
@@ -1,24 +1,111 @@
package org.hl7.fhir.utilities;

import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import java.util.stream.Stream;

import okhttp3.HttpUrl;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.hl7.fhir.utilities.http.HTTPResult;
import org.hl7.fhir.utilities.http.SimpleHTTPClient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

public class SimpleHTTPClientTest {

private MockWebServer server;

@BeforeEach
void setup() {
setupMockServer();
}

void setupMockServer() {
server = new MockWebServer();
}

@Test
public void testSimpleHTTPClient() throws IOException {
public void testGetApplicationJson() throws IOException, InterruptedException {

HttpUrl serverUrl = server.url("fhir/us/core/package-list.json?nocache=1724353440974");

server.enqueue(
new MockResponse()
.setBody("Monkeys").setResponseCode(200)
);

SimpleHTTPClient http = new SimpleHTTPClient();
String url = "https://hl7.org/fhir/us/core/package-list.json?nocache=" + System.currentTimeMillis();

HTTPResult res = http.get(url, "application/json");
HTTPResult res = http.get(serverUrl.url().toString(), "application/json");

assertThat(res.getCode()).isEqualTo(200);

RecordedRequest packageRequest = server.takeRequest();

assert packageRequest.getRequestUrl() != null;
assertThat(packageRequest.getRequestUrl().toString()).isEqualTo(serverUrl.url().toString());
assertThat(packageRequest.getMethod()).isEqualTo("GET");
assertThat(packageRequest.getHeader("Accept")).isEqualTo("application/json");

// System.out.println(res.getCode());
// System.out.println(new String(res.getContent(), StandardCharsets.UTF_8));
assertTrue(res.getCode() != 400);
}

public static Stream<Arguments> getRedirectArgs() {
return Stream.of(
Arguments.of(301, new String[]{"url1", "url2"}),
Arguments.of(301, new String[]{"url1", "url2", "url3"}),
Arguments.of(301, new String[]{"url1", "url2", "url3", "url4"}),
Arguments.of(302, new String[]{"url1", "url2"}),
Arguments.of(302, new String[]{"url1", "url2", "url3"}),
Arguments.of(302, new String[]{"url1", "url2", "url3", "url4"}),
Arguments.of(307, new String[]{"url1", "url2"}),
Arguments.of(307, new String[]{"url1", "url2", "url3"}),
Arguments.of(307, new String[]{"url1", "url2", "url3", "url4"}),
Arguments.of(308, new String[]{"url1", "url2"}),
Arguments.of(308, new String[]{"url1", "url2", "url3"}),
Arguments.of(308, new String[]{"url1", "url2", "url3", "url4"})
);
}

@ParameterizedTest
@MethodSource("getRedirectArgs")
public void testRedirectsGet(int code, String[] urlArgs) throws IOException, InterruptedException {

HttpUrl[] urls = new HttpUrl[urlArgs.length];
for (int i = 0; i < urlArgs.length; i++) {
urls[i] = server.url(urlArgs[i]);
if (i > 0) {
server.enqueue(
new MockResponse()
.setResponseCode(code)
.setBody("Pumas")
.addHeader("Location", urls[i].url().toString()));
}
}
server.enqueue(
new MockResponse()
.setBody("Monkeys").setResponseCode(200)
);
HttpUrl[] url = urls;

SimpleHTTPClient http = new SimpleHTTPClient();

HTTPResult res = http.get(url[0].url().toString(), "application/json");

assertThat(res.getCode()).isEqualTo(200);
assertThat(res.getContentAsString()).isEqualTo("Monkeys");
assertThat(server.getRequestCount()).isEqualTo(urlArgs.length);

for (int i = 0; i < urlArgs.length; i++) {
RecordedRequest packageRequest = server.takeRequest();
assertThat(packageRequest.getMethod()).isEqualTo("GET");
assertThat(packageRequest.getHeader("Accept")).isEqualTo("application/json");
}
}

}
@@ -3,6 +3,7 @@ package org.hl7.fhir.utilities;
import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;
import org.junit.jupiter.api.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

@@ -23,6 +24,7 @@ import org.junit.jupiter.api.*;
private static final String SAMPLE_CONTENT = "Line 1\nLine 2\nLine 3";
private static final List<String> SAMPLE_CONTENT_LINES = List.of("Line 1", "Line 2", "Line 3");
private static final String BOM = "\uFEFF";
private static final byte[] BOM_BYTES = new byte[]{(byte)239, (byte)187, (byte)191};

private static File readFile;
private final static List<File> createdFiles = new ArrayList<>(4);

@@ -104,6 +106,33 @@ import org.junit.jupiter.api.*;
assertArrayEquals(SAMPLE_CONTENT.getBytes(StandardCharsets.UTF_8), read);
}

@Test
void testBytesToFile() throws IOException {
final var writeFile = createTempFile();
TextFile.bytesToFile(BOM_BYTES, writeFile);
assertArrayEquals(BOM_BYTES, Files.readAllBytes(writeFile.toPath()));
}

@Test
void testAppendBytesToFile() throws IOException {
final var writeFile = createTempFile();
TextFile.bytesToFile(BOM_BYTES, writeFile);
assertArrayEquals(BOM_BYTES, Files.readAllBytes(writeFile.toPath()));

TextFile.appendBytesToFile(SAMPLE_CONTENT.getBytes(StandardCharsets.UTF_8), writeFile.getAbsolutePath());

ByteArrayOutputStream outputStream = new ByteArrayOutputStream( );
outputStream.write( BOM_BYTES );
outputStream.write(new byte[] {13, 10}); //newline
outputStream.write( SAMPLE_CONTENT.getBytes(StandardCharsets.UTF_8) );

byte[] expected = outputStream.toByteArray();

byte[] actual = Files.readAllBytes(writeFile.toPath());
assertArrayEquals(expected, actual);

}

@Test
void testStringToFile() throws IOException {
final var writeFile = createTempFile();
@@ -182,7 +182,7 @@ class UtilitiesTest {
assertEquals("0.95", Utilities.lowBoundaryForDecimal("1.0", 2));
assertEquals("-1.05000000", Utilities.lowBoundaryForDecimal("-1.0", 8));
assertEquals("1.23", Utilities.lowBoundaryForDecimal("1.234", 2));
assertEquals("1.57", Utilities.lowBoundaryForDecimal("1.567", 2));
assertEquals("1.56", Utilities.lowBoundaryForDecimal("1.567", 2));

assertEquals("0.50000000", Utilities.highBoundaryForDecimal("0", 8));
assertEquals("1.500000", Utilities.highBoundaryForDecimal("1", 6));
@@ -0,0 +1,227 @@
package org.hl7.fhir.utilities.npm;

import org.hl7.fhir.utilities.TextFile;
import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

public class FilesystemPackageManagerLockTests {

public static final String DUMMY_PACKAGE = "dummy#1.2.3";
String cachePath;
File cacheDirectory;
FilesystemPackageCacheManagerLocks filesystemPackageCacheLockManager;

@BeforeEach
public void setUp() throws IOException {
cachePath = ManagedFileAccess.fromPath(Files.createTempDirectory("fpcm-multithreadingTest")).getAbsolutePath();
cacheDirectory = new File(cachePath);
filesystemPackageCacheLockManager = new FilesystemPackageCacheManagerLocks(cacheDirectory);

}

@Test
public void testBaseCases() throws IOException {
filesystemPackageCacheLockManager.getCacheLock().doWriteWithLock(() -> {
assertThat(cacheDirectory).exists();
assertThat(cacheDirectory).isDirectory();
assertThat(cacheDirectory).canWrite();
assertThat(cacheDirectory).canRead();
return null;
});

final FilesystemPackageCacheManagerLocks.PackageLock packageLock = filesystemPackageCacheLockManager.getPackageLock(DUMMY_PACKAGE);
packageLock.doWriteWithLock(() -> {
assertThat(packageLock.getLockFile()).exists();
return null;
});
assertThat(packageLock.getLockFile()).doesNotExist();

packageLock.doReadWithLock(() -> {
assertThat(packageLock.getLockFile()).doesNotExist();
return null;
});
}

@Test void testNoPackageWriteOrReadWhileWholeCacheIsLocked() throws IOException, InterruptedException {
final FilesystemPackageCacheManagerLocks.PackageLock packageLock = filesystemPackageCacheLockManager.getPackageLock(DUMMY_PACKAGE);

AtomicBoolean cacheLockFinished = new AtomicBoolean(false);
List<Thread> threadList = new ArrayList<>();

Thread cacheThread = new Thread(() -> {
try {
filesystemPackageCacheLockManager.getCacheLock().doWriteWithLock(() -> {
try {
Thread.sleep(300);
cacheLockFinished.set(true);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return null;
});
} catch (IOException e) {
throw new RuntimeException(e);
}
});
cacheThread.start();
Thread.sleep(100);
for (int i = 0; i < 5; i++) {
threadList.add(new Thread(() -> {
try {
packageLock.doWriteWithLock(() -> {
assertThat(cacheLockFinished.get()).isTrue();
return null;
});
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
threadList.add(new Thread(() -> {
try {
packageLock.doReadWithLock(() -> {
assertThat(cacheLockFinished.get()).isTrue();
return null;
});
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
}

for (Thread thread: threadList) {
thread.start();
}
for (Thread thread: threadList) {
try {
thread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}

@Test void testSinglePackageWriteMultiPackageRead() throws IOException {
final FilesystemPackageCacheManagerLocks.PackageLock packageLock = filesystemPackageCacheLockManager.getPackageLock(DUMMY_PACKAGE);
AtomicInteger writeCounter = new AtomicInteger(0);

AtomicInteger readCounter = new AtomicInteger(0);
List<Thread> threadList = new ArrayList<>();

AtomicInteger maxReadThreads = new AtomicInteger();

for (int i = 0; i < 10; i++) {
threadList.add(new Thread(() -> {
try {
packageLock.doWriteWithLock(() -> {
int writeCount = writeCounter.incrementAndGet();
assertThat(writeCount).isEqualTo(1);
writeCounter.decrementAndGet();
return null;
});
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
}

for (int i = 0; i < 10; i++) {
threadList.add(new Thread(() -> {
try {
packageLock.doReadWithLock(() -> {
int readCount = readCounter.incrementAndGet();
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertThat(readCount).isGreaterThan(0);
if (readCount > maxReadThreads.get()) {
maxReadThreads.set(readCount);
}
readCounter.decrementAndGet();
return null;
});
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
}

for (Thread thread: threadList) {
thread.start();
}

for (Thread thread: threadList) {
try {
thread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}

assertThat(maxReadThreads.get()).isGreaterThan(1);
}

@Test
public void testReadWhenLockedByFileTimesOut() throws IOException {
FilesystemPackageCacheManagerLocks shorterTimeoutManager = filesystemPackageCacheLockManager.withLockTimeout(3L, TimeUnit.SECONDS);
final FilesystemPackageCacheManagerLocks.PackageLock packageLock = shorterTimeoutManager.getPackageLock(DUMMY_PACKAGE);
File lockFile = createPackageLockFile();

Exception exception = assertThrows(IOException.class, () -> {
packageLock.doReadWithLock(() -> {
assertThat(lockFile).exists();
return null;
});
});

assertThat(exception.getMessage()).contains("Error reading package");
assertThat(exception.getCause().getMessage()).contains("Timeout waiting for lock file deletion: " + lockFile.getName());
}

@Test
public void testReadWhenLockFileIsDeleted() throws IOException {
FilesystemPackageCacheManagerLocks shorterTimeoutManager = filesystemPackageCacheLockManager.withLockTimeout(5L, TimeUnit.SECONDS);
final FilesystemPackageCacheManagerLocks.PackageLock packageLock = shorterTimeoutManager.getPackageLock(DUMMY_PACKAGE);
File lockFile = createPackageLockFile();

Thread t = new Thread(() -> {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
lockFile.delete();
});
t.start();

packageLock.doReadWithLock(() -> {
assertThat(lockFile).doesNotExist();
return null;
});

}

private File createPackageLockFile() throws IOException {
File lockFile = Path.of(cachePath, DUMMY_PACKAGE + ".lock").toFile();
TextFile.stringToFile("", lockFile);
return lockFile;
}

}
@@ -3,38 +3,48 @@ package org.hl7.fhir.utilities.npm;
import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.File;

import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;

import javax.annotation.Nonnull;

import org.hl7.fhir.utilities.filesystem.ManagedFileAccess;
import org.junit.jupiter.api.RepeatedTest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.EnabledOnOs;
import org.junit.jupiter.api.condition.OS;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

public class FilesystemPackageManagerTests {

private static final String DUMMY_URL_1 = "http://dummy1.org";
private static final String DUMMY_URL_2 = "http://dummy2.org";
private static final String DUMMY_URL_1 = "https://dummy1.org";
private static final String DUMMY_URL_2 = "https://dummy2.org";

private static final String DUMMY_URL_3 = "http://dummy3.org";
private static final String DUMMY_URL_3 = "https://dummy3.org";

private static final String DUMMY_URL_4 = "http://dummy4.org";
private List<PackageServer> dummyPrivateServers = List.of(
private static final String DUMMY_URL_4 = "https://dummy4.org";
private final List<PackageServer> dummyPrivateServers = List.of(
new PackageServer(DUMMY_URL_1),
new PackageServer(DUMMY_URL_2)
);

private List<PackageServer> dummyDefaultServers = List.of(
private final List<PackageServer> dummyDefaultServers = List.of(
new PackageServer(DUMMY_URL_3),
new PackageServer(DUMMY_URL_4)
);

@Test
public void testDefaultServers() throws IOException {
FilesystemPackageCacheManager filesystemPackageCacheManager = getFilesystemPackageCacheManager(false);

@@ -101,24 +111,56 @@ public class FilesystemPackageManagerTests {
assertEquals( System.getenv("ProgramData") + "\\.fhir\\packages", folder.getAbsolutePath());
}

@Test
public void multithreadingTest() throws IOException {
/**
We repeat the same tests multiple times here, in order to catch very rare edge cases.
*/
public static Stream<Arguments> packageCacheMultiThreadTestParams() {
List<Arguments> params = new ArrayList<>();
for (int i = 0; i < 5; i++) {
params.add(Arguments.of(100, 1));
params.add(Arguments.of(10,10));
params.add(Arguments.of(100, 10));
}
return params.stream();
}

@MethodSource("packageCacheMultiThreadTestParams")
@ParameterizedTest
public void packageCacheMultiThreadTest(final int threadTotal, final int packageCacheManagerTotal) throws IOException {

String pcmPath = ManagedFileAccess.fromPath(Files.createTempDirectory("fpcm-multithreadingTest")).getAbsolutePath();
FilesystemPackageCacheManager pcm = new FilesystemPackageCacheManager.Builder().withCacheFolder(pcmPath).build();
FilesystemPackageCacheManager[] packageCacheManagers = new FilesystemPackageCacheManager[packageCacheManagerTotal];
Random rand = new Random();

final AtomicInteger totalSuccessful = new AtomicInteger();

final ConcurrentHashMap successfulThreads = new ConcurrentHashMap();
List<Thread> threads = new ArrayList<>();
for (int i = 0; i < 3; i++) {
for (int i = 0; i < threadTotal; i++) {
final int index = i;
Thread t = new Thread(() -> {
try {
pcm.loadPackage("hl7.fhir.xver-extensions#0.0.12");
System.out.println("Thread #" + index + ": " + Thread.currentThread().getId() + " started");
final int randomPCM = rand.nextInt(packageCacheManagerTotal);
final int randomOperation = rand.nextInt(4);
if (packageCacheManagers[randomPCM] == null) {
packageCacheManagers[randomPCM] = new FilesystemPackageCacheManager.Builder().withCacheFolder(pcmPath).build();
}
FilesystemPackageCacheManager pcm = packageCacheManagers[randomPCM];
if (randomOperation == 0) {
pcm.addPackageToCache("example.fhir.uv.myig", "1.2.3", this.getClass().getResourceAsStream("/npm/dummy-package.tgz"), "https://packages.fhir.org/example.fhir.uv.myig/1.2.3");
} else if (randomOperation == 1) {
pcm.clear();
} else if (randomOperation == 2) {
pcm.loadPackageFromCacheOnly("example.fhir.uv.myig", "1.2.3");
} else {
pcm.removePackage("example.fhir.uv.myig", "1.2.3");
}
totalSuccessful.incrementAndGet();
System.out.println("Thread " + index + " completed");
successfulThreads.put(Thread.currentThread().getId(), index);
System.out.println("Thread #" + index + ": " + Thread.currentThread().getId() + " completed");
} catch (Exception e) {
e.printStackTrace();
System.err.println("Thread " + index + " failed");
System.err.println("Thread #" + index + ": " + Thread.currentThread().getId() + " failed");
}
});
t.start();

@@ -131,6 +173,17 @@ public class FilesystemPackageManagerTests {

}
});
assertEquals(3, totalSuccessful.get());

printUnsuccessfulThreads(successfulThreads, threads);
assertEquals(threadTotal, totalSuccessful.get(), "Not all threads were successful.");

}

private void printUnsuccessfulThreads(final ConcurrentHashMap successfulThreads, List<Thread> threads) {
for (Thread t : threads) {
if (!successfulThreads.containsKey(t.getId())) {
System.out.println("Thread #" + t.getId() + " failed");
}
}
}
}
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.3.21-SNAPSHOT</version>
<version>6.3.23-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.3.21-SNAPSHOT</version>
<version>6.3.23-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -497,10 +497,12 @@ public class IgLoader implements IValidationEngineLoader {
private Map<String, ByteProvider> fetchByPackage(String src, boolean loadInContext) throws FHIRException, IOException {
NpmPackage pi;

InputStream stream = directProvider.fetchByPackage(src);
if (stream != null) {
pi = NpmPackage.fromPackage(stream);
return loadPackage(pi, loadInContext);
if (directProvider != null) {
InputStream stream = directProvider.fetchByPackage(src);
if (stream != null) {
pi = NpmPackage.fromPackage(stream);
return loadPackage(pi, loadInContext);
}
}
String id = src;
String version = null;
@@ -5730,13 +5730,15 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat

private boolean checkPublisherConsistency(ValidationContext valContext, List<ValidationMessage> errors, Element element, NodeStack stack, boolean contained) {

boolean ok = true;
String pub = element.getNamedChildValue("publisher", false);

ok = rule(errors, "2024-08-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(), element.getExtensions(ToolingExtensions.EXT_WORKGROUP).size() <= 1, I18nConstants.VALIDATION_HL7_PUBLISHER_MULTIPLE_WGS) && ok;
Base wgT = element.getExtensionValue(ToolingExtensions.EXT_WORKGROUP);
String wg = wgT == null ? null : wgT.primitiveValue();
String url = element.getNamedChildValue("url");

if (contained && wg == null) {
boolean ok = true;
Element container = valContext.getRootResource();
if (element.hasExtension(ToolingExtensions.EXT_WORKGROUP)) {
// container already specified the HL7 WG, so we don't need to test

@@ -5775,9 +5777,9 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
if (rule(errors, "2023-09-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(), wgd != null, I18nConstants.VALIDATION_HL7_WG_UNKNOWN, wg)) {
String rpub = "HL7 International / "+wgd.getName();
if (warning(errors, "2023-09-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(), pub != null, I18nConstants.VALIDATION_HL7_PUBLISHER_MISSING, wg, rpub)) {
boolean ok = rpub.equals(pub);
ok = rpub.equals(pub) && ok;
if (!ok && wgd.getName2() != null) {
ok = ("HL7 International / "+wgd.getName2()).equals(pub);
ok = ("HL7 International / "+wgd.getName2()).equals(pub) && ok;
warningOrError(pub.contains("/"), errors, "2023-09-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(), ok, I18nConstants.VALIDATION_HL7_PUBLISHER_MISMATCH2, wg, rpub, "HL7 International / "+wgd.getName2(), pub);
} else {
warningOrError(pub.contains("/"), errors, "2023-09-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(), ok, I18nConstants.VALIDATION_HL7_PUBLISHER_MISMATCH, wg, rpub, pub);

@@ -5785,14 +5787,14 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
}
warning(errors, "2023-09-15", IssueType.BUSINESSRULE, element.line(), element.col(), stack.getLiteralPath(),
Utilities.startsWithInList( wgd.getLink(), urls), I18nConstants.VALIDATION_HL7_WG_URL, wg, wgd.getLink());
return true;
return ok;
}
} else {
return true; // HL7 sid.
return ok; // HL7 sid.
}
}

return false;
return ok;
}

private boolean statusCodesConsistent(String status, String standardsStatus) {
@@ -277,7 +277,7 @@ public class ValueSetValidator extends BaseValidator {
}
if (version == null) {
CodeSystem cs = context.fetchCodeSystem(system);
if (cs != null && !CodeSystemUtilities.isExemptFromMultipleVersionChecking(system)) {
if (cs != null && !CodeSystemUtilities.isExemptFromMultipleVersionChecking(system) && fetcher != null) {
Set<String> possibleVersions = fetcher.fetchCanonicalResourceVersions(null, valContext.getAppContext(), system);
warning(errors, NO_RULE_DATE, IssueType.INVALID, stack, possibleVersions.size() <= 1, I18nConstants.TYPE_SPECIFIC_CHECKS_DT_CANONICAL_MULTIPLE_POSSIBLE_VERSIONS,
system, cs.getVersion(), CommaSeparatedStringBuilder.join(", ", Utilities.sorted(possibleVersions)));
@@ -163,6 +163,7 @@ public class TxTester {
return error;
}
}

private boolean runSuite(JsonObject suite, ITerminologyClient tx, List<String> modes, String filter, JsonArray output) throws FHIRFormatError, FileNotFoundException, IOException {
System.out.println("Group "+suite.asString("name"));
JsonObject outputS = new JsonObject();

@@ -187,6 +188,7 @@ public class TxTester {
if (output != null) {
output.add(outputT);
}
long start = System.currentTimeMillis();
Parameters profile = loadProfile(test);
outputT.add("name", test.asString("name"));
if (Utilities.noString(filter) || filter.equals("*") || test.asString("name").contains(filter)) {

@@ -219,7 +221,7 @@ public class TxTester {
throw new Exception("Unknown Operation "+test.asString("operation"));
}

System.out.println(msg == null ? "Pass" : "Fail");
System.out.println((msg == null ? "Pass" : "Fail") + " ("+Utilities.describeDuration(System.currentTimeMillis() - start)+")");
if (msg != null) {
System.out.println(" "+msg);
error = msg;
@@ -1,8 +1,7 @@
package org.hl7.fhir.validation.cli.services;

import static org.hl7.fhir.validation.tests.utilities.TestUtilities.getTerminologyCacheDirectory;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.AdditionalMatchers.and;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;

@@ -38,7 +37,6 @@ import org.hl7.fhir.validation.ValidationEngine;
import org.hl7.fhir.validation.cli.model.CliContext;
import org.hl7.fhir.validation.cli.model.FileInfo;
import org.hl7.fhir.validation.cli.model.ValidationRequest;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;

@@ -72,7 +70,7 @@ class ValidationServiceTests {
Set<String> sessionIds = sessionCache.getSessionIds();
if (sessionIds.stream().findFirst().isPresent()) {
// Verify that after 1 run there is only one entry within the cache
Assertions.assertEquals(1, sessionIds.size());
assertEquals(1, sessionIds.size());
myService.validateSources(request.setSessionId(sessionIds.stream().findFirst().get()));
// Verify that the cache has been called on twice with the id created in the first run
verify(sessionCache, Mockito.times(2)).fetchSessionValidatorEngine(sessionIds.stream().findFirst().get());

@@ -150,30 +148,26 @@ class ValidationServiceTests {

@Test
@DisplayName("Test that conversion throws an Exception when no -output or -outputSuffix params are set")
public void convertSingleSourceNoOutput() throws Exception {
public void convertSingleSourceNoOutput() {
SessionCache sessionCache = mock(SessionCache.class);
ValidationService validationService = new ValidationService(sessionCache);
ValidationEngine validationEngine = mock(ValidationEngine.class);

CliContext cliContext = getCliContextSingleSource();
Exception exception = assertThrows( Exception.class, () -> {
validationService.convertSources(cliContext,validationEngine);
});
assertThrows( Exception.class, () -> validationService.convertSources(cliContext,validationEngine));
}

@Test
@DisplayName("Test that conversion throws an Exception when multiple sources are set and an -output param is set")
public void convertMultipleSourceOnlyOutput() throws Exception {
public void convertMultipleSourceOnlyOutput() {
SessionCache sessionCache = mock(SessionCache.class);
ValidationService validationService = new ValidationService(sessionCache);
ValidationEngine validationEngine = mock(ValidationEngine.class);

CliContext cliContext = getCliContextMultipleSource();
assertThrows( Exception.class, () -> {
validationService.convertSources(cliContext,validationEngine);
}
assertThrows( Exception.class, () -> validationService.convertSources(cliContext,validationEngine)
);
}

@@ -208,28 +202,24 @@ class ValidationServiceTests {

@Test
@DisplayName("Test that snapshot generation throws an Exception when no -output or -outputSuffix params are set")
public void generateSnapshotSingleSourceNoOutput() throws Exception {
public void generateSnapshotSingleSourceNoOutput() {
SessionCache sessionCache = mock(SessionCache.class);
ValidationService validationService = new ValidationService(sessionCache);
ValidationEngine validationEngine = mock(ValidationEngine.class);

CliContext cliContext = getCliContextSingleSource();
Exception exception = assertThrows( Exception.class, () -> {
validationService.generateSnapshot(cliContext.setSv(DUMMY_SV),validationEngine);
});
assertThrows( Exception.class, () -> validationService.generateSnapshot(cliContext.setSv(DUMMY_SV),validationEngine));
}

@Test
@DisplayName("Test that snapshot generation throws an Exception when multiple sources are set and an -output param is set")
public void generateSnapshotMultipleSourceOnlyOutput() throws Exception {
public void generateSnapshotMultipleSourceOnlyOutput() {
SessionCache sessionCache = mock(SessionCache.class);
ValidationService validationService = new ValidationService(sessionCache);
ValidationEngine validationEngine = mock(ValidationEngine.class);

CliContext cliContext = getCliContextMultipleSource();
assertThrows( Exception.class, () -> {
validationService.generateSnapshot(cliContext.setOutput(DUMMY_OUTPUT).setSv(DUMMY_SV),validationEngine);
}
assertThrows( Exception.class, () -> validationService.generateSnapshot(cliContext.setOutput(DUMMY_OUTPUT).setSv(DUMMY_SV),validationEngine)
);
}

@@ -284,7 +274,7 @@ class ValidationServiceTests {
final ValidationEngine mockValidationEngine = mock(ValidationEngine.class);
when(mockValidationEngine.getContext()).thenReturn(workerContext);

final ValidationEngine.ValidationEngineBuilder mockValidationEngineBuilder = mock(ValidationEngine.ValidationEngineBuilder.class);;
final ValidationEngine.ValidationEngineBuilder mockValidationEngineBuilder = mock(ValidationEngine.ValidationEngineBuilder.class);
final ValidationService validationService = createFakeValidationService(mockValidationEngineBuilder, mockValidationEngine);

CliContext cliContext = new CliContext();

@@ -302,7 +292,7 @@ class ValidationServiceTests {
final ValidationEngine mockValidationEngine = mock(ValidationEngine.class);
when(mockValidationEngine.getContext()).thenReturn(workerContext);

final ValidationEngine.ValidationEngineBuilder mockValidationEngineBuilder = mock(ValidationEngine.ValidationEngineBuilder.class);;
final ValidationEngine.ValidationEngineBuilder mockValidationEngineBuilder = mock(ValidationEngine.ValidationEngineBuilder.class);
final ValidationService validationService = createFakeValidationService(mockValidationEngineBuilder, mockValidationEngine);

CliContext cliContext = new CliContext();

@@ -323,18 +313,18 @@ class ValidationServiceTests {
when(validationEngineBuilder.withUserAgent(anyString())).thenReturn(validationEngineBuilder);
try {
when(validationEngineBuilder.fromSource(isNull())).thenReturn(validationEngine);
} catch (IOException e) {
throw new RuntimeException(e);
} catch (URISyntaxException e) {
} catch (IOException | URISyntaxException e) {
throw new RuntimeException(e);
}
return validationEngineBuilder;
}

@Override
protected void loadIgsAndExtensions(ValidationEngine validationEngine, CliContext cliContext, TimeTracker timeTracker) throws IOException, URISyntaxException {
protected void loadIgsAndExtensions(ValidationEngine validationEngine, CliContext cliContext, TimeTracker timeTracker) {
//Don't care. Do nothing.
}
};
}

}
pom.xml

@@ -14,14 +14,14 @@
HAPI FHIR
-->
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.3.21-SNAPSHOT</version>
<version>6.3.23-SNAPSHOT</version>
<packaging>pom</packaging>

<properties>
<commons_compress_version>1.26.0</commons_compress_version>
<guava_version>32.0.1-jre</guava_version>
<hapi_fhir_version>6.4.1</hapi_fhir_version>
<validator_test_case_version>1.5.19</validator_test_case_version>
<validator_test_case_version>1.5.20-SNAPSHOT</validator_test_case_version>
<jackson_version>2.17.0</jackson_version>
<junit_jupiter_version>5.9.2</junit_jupiter_version>
<junit_platform_launcher_version>1.8.2</junit_platform_launcher_version>

@@ -32,7 +32,7 @@
<lombok_version>1.18.32</lombok_version>
<byte_buddy_version>1.14.8</byte_buddy_version>
<apache_poi_version>5.2.1</apache_poi_version>
<saxon_he_version>9.8.0-15</saxon_he_version>
<saxon_he_version>11.6</saxon_he_version>
<maven.compiler.release>11</maven.compiler.release>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>