Merge pull request #418 from hapifhir/gg-r4b-work

Gg r4b work
Grahame Grieve 2021-01-21 07:13:43 +11:00 committed by GitHub
commit ecc59f26a6
11 changed files with 71 additions and 32 deletions

View File

@@ -4,6 +4,19 @@
| :---: |
| [![Build Status][Badge-BuildPipeline]][Link-BuildPipeline] |
This is the core object handling code, with utilities (including the validator), for the FHIR specification.
Included in this repo:
* org.hl7.fhir.utilities: Shared code used by all the other projects - including the internationalization code
* org.hl7.fhir.r5: Object models and utilities for the R5 candidate (will change regularly as new R5 candidates are released)
* org.hl7.fhir.r4: Object models and utilities for R4
* org.hl7.fhir.dstu3: Object models and utilities for STU3
* org.hl7.fhir.dstu2: Object models and utilities for STU2
* org.hl7.fhir.dstu2016may: Object models and utilities for an early STU3 candidate used by some implementers
* org.hl7.fhir.convertors: Code to convert between versions, and other version independence code - uses all the above projects
* org.hl7.fhir.validation: The FHIR Java validator
* org.hl7.fhir.validation.cli: Holder project for releasing the FHIR validator as a single fat jar (will be removed in the future)
### CI/CD
All integration and delivery is done on Azure Pipelines. The Azure project can be viewed [here][Link-AzureProject].
@@ -113,7 +126,7 @@ compile group: 'ca.uhn.hapi.fhir', name: 'hapi-fhir-structures-r5', version: '(l
```
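For orientation, here is a minimal sketch of using the R5 structures once the dependency above is on the classpath. This is a hedged example, not part of the commit: the class names come from the org.hl7.fhir.r5 packages and the resource values are made up.

```java
// Hedged sketch, not from this commit: shows the R5 object model plus the JSON
// parser that the hapi-fhir-structures-r5 artifact provides.
import org.hl7.fhir.r5.formats.IParser.OutputStyle;
import org.hl7.fhir.r5.formats.JsonParser;
import org.hl7.fhir.r5.model.Patient;

public class QuickStart {
  public static void main(String[] args) throws Exception {
    // Build a resource with the R5 object model...
    Patient patient = new Patient();
    patient.addName().setFamily("Chalmers").addGiven("Peter");

    // ...and serialize it with the bundled JSON parser.
    String json = new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(patient);
    System.out.println(json);
  }
}
```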
### Maintenance
This project is maintained by [Grahame Grieve][Link-grahameGithub] and [James Agnew][Link-jamesGithub] on behalf of the FHIR community.
This project is maintained by [Grahame Grieve][Link-grahameGithub], [Mark Iantorno][Link-markGithub] and [James Agnew][Link-jamesGithub] on behalf of the FHIR community.
[Link-AzureProject]: https://dev.azure.com/fhir-pipelines/fhir-core-library
[Link-BuildPipeline]: https://dev.azure.com/fhir-pipelines/fhir-core-library/_build/latest?definitionId=29&branchName=master
@@ -134,6 +147,7 @@ This project is maintained by [Grahame Grieve][Link-grahameGithub] and [James Ag
[Link-validationSonatypeRelease]: https://oss.sonatype.org/service/local/artifact/maven/redirect?r=releases&g=ca.uhn.hapi.fhir&a=org.hl7.fhir.validation&v=LATEST "Sonatype Release"
[Link-grahameGithub]: https://github.com/grahamegrieve
[Link-jamesGithub]: https://github.com/jamesagnew
[Link-markGithub]: https://github.com/markiantorno
[Link-Publishing]: https://github.com/FHIR/fhir-test-cases/wiki/Publishing-Binaries
[Link-PublishingRelease]: https://github.com/FHIR/fhir-test-cases/wiki/Detailed-Release-Instructions

View File

@@ -165,7 +165,6 @@ public class JsonParser extends ParserBase {
Set<String> processed = new HashSet<String>();
if (hasResourceType)
processed.add("resourceType");
processed.add("fhir_comments");
// note that we do not trouble ourselves to maintain the wire format order here - we don't even know what it was anyway
// first pass: process the properties
@@ -218,7 +217,7 @@ public class JsonParser extends ParserBase {
}
} else {
if (property.isList()) {
logError(line(e), col(e), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_, describeType(e)), IssueSeverity.ERROR);
logError(line(e), col(e), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_, describeType(e), name, path), IssueSeverity.ERROR);
}
parseChildComplexInstance(npath, object, element, property, name, e);
}
@@ -247,7 +246,7 @@ public class JsonParser extends ParserBase {
else
parseChildren(npath, child, n, false);
} else
logError(line(e), col(e), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE__NOT_, (property.isList() ? "an Array" : "an Object"), describe(e)), IssueSeverity.ERROR);
logError(line(e), col(e), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE__NOT_, (property.isList() ? "an Array" : "an Object"), describe(e), name, npath), IssueSeverity.ERROR);
}
private String describe(JsonElement e) {
@@ -273,11 +272,11 @@ public class JsonParser extends ParserBase {
if (property.isList()) {
boolean ok = true;
if (!(main == null || main instanceof JsonArray)) {
logError(line(main), col(main), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_A_, describe(main)), IssueSeverity.ERROR);
logError(line(main), col(main), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_, describe(main), name, path), IssueSeverity.ERROR);
ok = false;
}
if (!(fork == null || fork instanceof JsonArray)) {
logError(line(fork), col(fork), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_BASE_PROPERTY_MUST_BE_AN_ARRAY_NOT_A_, describe(main)), IssueSeverity.ERROR);
logError(line(fork), col(fork), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_BASE_PROPERTY_MUST_BE_AN_ARRAY_NOT_, describe(main), name, path), IssueSeverity.ERROR);
ok = false;
}
if (ok) {
@@ -307,9 +306,9 @@ public class JsonParser extends ParserBase {
JsonElement main, JsonElement fork) throws FHIRException {
if (main != null && !(main instanceof JsonPrimitive))
logError(line(main), col(main), npath, IssueType.INVALID, context.formatMessage(
I18nConstants.THIS_PROPERTY_MUST_BE_AN_SIMPLE_VALUE_NOT_, describe(main)), IssueSeverity.ERROR);
I18nConstants.THIS_PROPERTY_MUST_BE_AN_SIMPLE_VALUE_NOT_, describe(main), name, npath), IssueSeverity.ERROR);
else if (fork != null && !(fork instanceof JsonObject))
logError(line(fork), col(fork), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_OBJECT_NOT_, describe(fork)), IssueSeverity.ERROR);
logError(line(fork), col(fork), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_AN_OBJECT_NOT_, describe(fork), name, npath), IssueSeverity.ERROR);
else {
Element n = new Element(name, property).markLocation(line(main != null ? main : fork), col(main != null ? main : fork));
element.getChildren().add(n);

View File

@@ -209,13 +209,13 @@ public class TurtleParser extends ParserBase {
// todo: check type
n.setValue(value);
} else
logError(object.getLine(), object.getCol(), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_A_LITERAL_NOT_A_, e.getClass().getName()), IssueSeverity.ERROR);
logError(object.getLine(), object.getCol(), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_A_LITERAL_NOT_, "a "+e.getClass().getName()), IssueSeverity.ERROR);
}
} else
parseChildren(src, npath, child, n, false);
} else
logError(object.getLine(), object.getCol(), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_A_URI_OR_BNODE_NOT_A_, e.getClass().getName()), IssueSeverity.ERROR);
logError(object.getLine(), object.getCol(), npath, IssueType.INVALID, context.formatMessage(I18nConstants.THIS_PROPERTY_MUST_BE_A_URI_OR_BNODE_NOT_, "a "+e.getClass().getName()), IssueSeverity.ERROR);
}

View File

@@ -4,6 +4,7 @@ package org.hl7.fhir.r5.formats;
import java.io.FileInputStream;
import java.io.IOException;
/*
@@ -43,6 +44,7 @@ import java.io.UnsupportedEncodingException;
import org.hl7.fhir.exceptions.FHIRFormatError;
import org.hl7.fhir.r5.model.DataType;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.SearchParameter;
import org.xmlpull.v1.XmlPullParserException;
@@ -113,6 +115,8 @@ public interface IParser {
*/
public Resource parse(InputStream input) throws IOException, FHIRFormatError;
public Resource parseAndClose(InputStream input) throws IOException, FHIRFormatError;
/**
* parse content that is known to be a resource
* @throws UnsupportedEncodingException

View File

@@ -36,7 +36,9 @@ package org.hl7.fhir.r5.formats;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.ParseException;
import java.util.HashMap;
@@ -46,6 +48,7 @@ import org.apache.commons.codec.binary.Base64;
import org.hl7.fhir.exceptions.FHIRFormatError;
import org.hl7.fhir.r5.model.DataType;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.SearchParameter;
import org.hl7.fhir.utilities.Utilities;
public abstract class ParserBase extends FormatUtilities implements IParser {
@@ -101,6 +104,7 @@ public abstract class ParserBase extends FormatUtilities implements IParser {
return bytes.toByteArray();
}
// -- Parser Configuration --------------------------------
protected String xhtmlMessage;
@@ -233,5 +237,14 @@ public abstract class ParserBase extends FormatUtilities implements IParser {
return value;
}
@Override
public Resource parseAndClose(InputStream input) throws IOException, FHIRFormatError {
try {
return parse(input);
} finally {
input.close();
}
}
}
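To make the new method concrete, here is a hedged usage sketch (the file name and its contents are hypothetical): parse leaves the stream open for the caller, while parseAndClose closes it even when parsing throws.

```java
// Hedged usage sketch for the new parseAndClose; the file name is hypothetical.
import java.io.FileInputStream;
import java.io.InputStream;

import org.hl7.fhir.r5.formats.IParser;
import org.hl7.fhir.r5.formats.JsonParser;
import org.hl7.fhir.r5.model.Resource;

public class ParseAndCloseExample {
  public static void main(String[] args) throws Exception {
    IParser parser = new JsonParser();

    // parse(...) leaves closing the stream to the caller.
    try (InputStream in = new FileInputStream("patient.json")) {
      Resource viaParse = parser.parse(in);
      System.out.println(viaParse.fhirType());
    }

    // parseAndClose(...) closes the stream itself, even if parsing throws.
    Resource viaParseAndClose = parser.parseAndClose(new FileInputStream("patient.json"));
    System.out.println(viaParseAndClose.fhirType());
  }
}
```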

View File

@@ -33,6 +33,7 @@ package org.hl7.fhir.r5.model;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r5.model.Enumerations.FHIRVersion;
import org.hl7.fhir.exceptions.FHIRException;
public class Enumerations {
@@ -6478,6 +6479,9 @@ The primary difference between a medicationusage and a medicationadministration
* added to help the parsers
*/
NULL;
public static final FHIRVersion R4B = FHIRVersion._4_0_1;
public static FHIRVersion fromCode(String codeString) throws FHIRException {
if (codeString == null || "".equals(codeString))
return null;
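A small hedged illustration of what the new constant gives downstream code: R4B is just an alias for the existing 4.0.1 enum value, so it compares equal to FHIRVersion._4_0_1 and renders the 4.0.1 code.

```java
// Hedged illustration of the new R4B alias; the printed values assume the
// standard toCode() behaviour of the generated FHIRVersion enum.
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Enumerations.FHIRVersion;

public class R4BAliasExample {
  public static void main(String[] args) {
    FHIRVersion v = Enumerations.FHIRVersion.R4B;
    System.out.println(v == FHIRVersion._4_0_1); // true - same enum constant
    System.out.println(v.toCode());              // "4.0.1"
  }
}
```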

View File

@@ -80,9 +80,9 @@ public class TypesUtilities {
}
public static List<String> wildcardTypes() {
public static List<String> wildcardTypes(String version) {
List<String> res = new ArrayList<String>();
for (WildcardInformation wi : wildcards())
for (WildcardInformation wi : wildcards(version))
res.add(wi.getTypeName());
return res;
}
@@ -94,7 +94,10 @@ public class TypesUtilities {
// Any of the IDMP data types
// You have to walk into them to profile them.
//
public static List<WildcardInformation> wildcards() {
public static List<WildcardInformation> wildcards(String version) {
if (version.startsWith("_")) {
throw new Error("underscore");
}
List<WildcardInformation> res = new ArrayList<WildcardInformation>();
// primitive types
@@ -108,7 +111,9 @@ public class TypesUtilities {
res.add(new WildcardInformation("id", TypeClassification.PRIMITIVE));
res.add(new WildcardInformation("instant", TypeClassification.PRIMITIVE));
res.add(new WildcardInformation("integer", TypeClassification.PRIMITIVE));
res.add(new WildcardInformation("integer64", TypeClassification.PRIMITIVE));
if (!version.startsWith("4.0")) {
res.add(new WildcardInformation("integer64", TypeClassification.PRIMITIVE));
}
res.add(new WildcardInformation("markdown", TypeClassification.PRIMITIVE));
res.add(new WildcardInformation("oid", TypeClassification.PRIMITIVE));
res.add(new WildcardInformation("positiveInt", TypeClassification.PRIMITIVE));

View File

@@ -80,6 +80,7 @@ public class FHIRToolingClient {
private static final int TIMEOUT_NORMAL = 1500;
private static final int TIMEOUT_OPERATION = 30000;
private static final int TIMEOUT_ENTRY = 500;
private static final int TIMEOUT_OPERATION_LONG = 60000;
private static final int TIMEOUT_OPERATION_EXPAND = 120000;
@@ -132,7 +133,7 @@ public class FHIRToolingClient {
capabilities = (TerminologyCapabilities) client.issueGetResourceRequest(resourceAddress.resolveMetadataTxCaps(),
getPreferredResourceFormat(), "TerminologyCapabilities", TIMEOUT_NORMAL).getReference();
} catch (Exception e) {
handleException("An error has occurred while trying to fetch the server's terminology capabilities", e);
throw new FHIRException("Error fetching the server's terminology capabilities", e);
}
return capabilities;
}
@@ -143,7 +144,7 @@ public class FHIRToolingClient {
conformance = (CapabilityStatement) client.issueGetResourceRequest(resourceAddress.resolveMetadataUri(false),
getPreferredResourceFormat(), "CapabilitiesStatement", TIMEOUT_NORMAL).getReference();
} catch (Exception e) {
handleException("An error has occurred while trying to fetch the server's conformance statement", e);
throw new FHIRException("Error fetching the server's conformance statement", e);
}
return conformance;
}
@@ -154,7 +155,7 @@ public class FHIRToolingClient {
capabilities = (CapabilityStatement) client.issueGetResourceRequest(resourceAddress.resolveMetadataUri(true),
getPreferredResourceFormat(), "CapabilitiesStatement-Quick", TIMEOUT_NORMAL).getReference();
} catch (Exception e) {
handleException("An error fetching the server's capability statement: "+e.getMessage(), e);
throw new FHIRException("Error fetching the server's capability statement: "+e.getMessage(), e);
}
return capabilities;
}
@@ -182,7 +183,7 @@ public class FHIRToolingClient {
throw new EFhirClientException("Server returned error code " + result.getHttpStatus(), (OperationOutcome) result.getPayload());
}
} catch (Exception e) {
handleException("An error has occurred while trying to read this version of the resource", e);
throw new FHIRException("Error trying to read this version of the resource", e);
}
return result.getPayload();
}
@@ -291,7 +292,7 @@ public class FHIRToolingClient {
public Bundle transaction(Bundle batch) {
Bundle transactionResult = null;
try {
transactionResult = client.postBatchRequest(resourceAddress.getBaseServiceUri(), ByteUtils.resourceToByteArray(batch, false, isJson(getPreferredResourceFormat())), getPreferredResourceFormat(), "transaction", TIMEOUT_NORMAL + batch.getEntry().size());
transactionResult = client.postBatchRequest(resourceAddress.getBaseServiceUri(), ByteUtils.resourceToByteArray(batch, false, isJson(getPreferredResourceFormat())), getPreferredResourceFormat(), "transaction", TIMEOUT_OPERATION + (TIMEOUT_ENTRY * batch.getEntry().size()));
} catch (Exception e) {
handleException("An error occurred trying to process this transaction request", e);
}

View File

@@ -429,17 +429,16 @@ public class I18nConstants {
public static final String TERMINOLOGY_TX_NOSVC_BOUND_EXT = "TERMINOLOGY_TX_NOSVC_BOUND_EXT";
public static final String TEXT_SHOULD_NOT_BE_PRESENT = "Text_should_not_be_present";
public static final String THE_BASE_SNAPSHOT_MARKS_A_SLICING_AS_CLOSED_BUT_THE_DIFFERENTIAL_TRIES_TO_EXTEND_IT_IN__AT__ = "The_base_snapshot_marks_a_slicing_as_closed_but_the_differential_tries_to_extend_it_in__at__";
public static final String THIS_BASE_PROPERTY_MUST_BE_AN_ARRAY_NOT_A_ = "This_base_property_must_be_an_Array_not_a_";
public static final String THIS_BASE_PROPERTY_MUST_BE_AN_ARRAY_NOT_ = "This_base_property_must_be_an_Array_not_";
public static final String THIS_CANNOT_BE_PARSED_AS_A_FHIR_OBJECT_NO_NAME = "This_cannot_be_parsed_as_a_FHIR_object_no_name";
public static final String THIS_DOES_NOT_APPEAR_TO_BE_A_FHIR_RESOURCE_UNKNOWN_NAMESPACENAME_ = "This_does_not_appear_to_be_a_FHIR_resource_unknown_namespacename_";
public static final String THIS_DOES_NOT_APPEAR_TO_BE_A_FHIR_RESOURCE_UNKNOWN_NAME_ = "This_does_not_appear_to_be_a_FHIR_resource_unknown_name_";
public static final String THIS_ELEMENT_DOES_NOT_MATCH_ANY_KNOWN_SLICE_ = "This_element_does_not_match_any_known_slice_";
public static final String THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_ = "This_property_must_be_an_Array_not_";
public static final String THIS_PROPERTY_MUST_BE_AN_ARRAY_NOT_A_ = "This_property_must_be_an_Array_not_a_";
public static final String THIS_PROPERTY_MUST_BE_AN_OBJECT_NOT_ = "This_property_must_be_an_object_not_";
public static final String THIS_PROPERTY_MUST_BE_AN_SIMPLE_VALUE_NOT_ = "This_property_must_be_an_simple_value_not_";
public static final String THIS_PROPERTY_MUST_BE_A_LITERAL_NOT_A_ = "This_property_must_be_a_Literal_not_a_";
public static final String THIS_PROPERTY_MUST_BE_A_URI_OR_BNODE_NOT_A_ = "This_property_must_be_a_URI_or_bnode_not_a_";
public static final String THIS_PROPERTY_MUST_BE_A_LITERAL_NOT_ = "This_property_must_be_a_Literal_not_";
public static final String THIS_PROPERTY_MUST_BE_A_URI_OR_BNODE_NOT_ = "This_property_must_be_a_URI_or_bnode_not_";
public static final String THIS_PROPERTY_MUST_BE__NOT_ = "This_property_must_be__not_";
public static final String THIS__CANNOT_BE_PARSED_AS_A_FHIR_OBJECT_NO_NAMESPACE = "This__cannot_be_parsed_as_a_FHIR_object_no_namespace";
public static final String TYPE_CHECKS_FIXED_CC = "TYPE_CHECKS_FIXED_CC";

View File

@@ -367,10 +367,10 @@ Error_parsing_JSON_the_primitive_value_must_be_a_string = Error parsing JSON: th
Error_parsing_JSON_the_primitive_value_must_be_a_number = Error parsing JSON: the primitive value must be a number
Error_parsing_JSON_the_primitive_value_must_be_a_boolean = Error parsing JSON: the primitive value must be a boolean
Error_parsing_XHTML_ = Error parsing XHTML: {0}
This_property_must_be_an_object_not_ = This property must be an object, not {0}
This_property_must_be_an_simple_value_not_ = This property must be an simple value, not {0}
This_property_must_be__not_ = This property must be {0}, not {1}
This_property_must_be_an_Array_not_ = This property must be an Array, not {0}
This_property_must_be_an_object_not_ = This property must be an object, not {0} ({1} at {2})
This_property_must_be_an_simple_value_not_ = This property must be an simple value, not {0} ({1} at {2})
This_property_must_be__not_ = The property {2} must be {0}, not {1} (at {3})
This_property_must_be_an_Array_not_ = The property {1} must be a JSON Array, not {0} (at {2})
Unrecognised_property_ = Unrecognised property ''@{0}''
Object_must_have_some_content = Object must have some content
Error_parsing_JSON_ = Error parsing JSON: {0}
@@ -384,8 +384,8 @@ Element_must_have_some_content = Element must have some content
No_processing_instructions_allowed_in_resources = No processing instructions allowed in resources
Unknown_resource_type_missing_rdfstype = Unknown resource type (missing rdfs:type)
reference_to__cannot_be_resolved = reference to {0} cannot be resolved
This_property_must_be_a_URI_or_bnode_not_a_ = This property must be a URI or bnode, not a {0}
This_property_must_be_a_Literal_not_a_ = This property must be a Literal, not a {0}
This_property_must_be_a_URI_or_bnode_not_ = This property must be a URI or bnode, not {0}
This_property_must_be_a_Literal_not_ = This property must be a Literal, not {0}
Unrecognised_predicate_ = Unrecognised predicate ''{0}''
Error_parsing_Turtle_ = Error parsing Turtle: {0}
Unexpected_datatype_for_rdfstype = Unexpected datatype for rdfs:type
@@ -437,8 +437,8 @@ Unable_to_resolve_system__value_set_expansion_has_multiple_systems = Unable to r
Unable_to_resolve_system__value_set_has_no_includes_or_expansion = Unable to resolve system - value set has no includes or expansion
Unable_to_resolve_system__value_set_has_excludes = Unable to resolve system - value set has excludes
Unable_to_resolve_system__no_value_set = Unable to resolve system - no value set
This_base_property_must_be_an_Array_not_a_ = This base property must be an Array, not a {0}
This_property_must_be_an_Array_not_a_ = This property must be an Array, not a {0}
This_base_property_must_be_an_Array_not_ = This base property must be an Array, not {0}
This_property_must_be_an_Array_not_ = This property must be an Array, not {0}
documentmsg = (document)
xml_attr_value_invalid = The XML Attribute {0} has an illegal character
xml_encoding_invalid = The XML encoding is invalid (must be UTF-8)
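The reworded templates now receive the property name and path as extra arguments, matching the parser changes above. A hedged rendering sketch, assuming the {n} placeholders follow java.text.MessageFormat conventions; the property name and path values are made up:

```java
// Hedged rendering sketch; the real lookup goes through context.formatMessage(...).
import java.text.MessageFormat;

public class MessageRenderingExample {
  public static void main(String[] args) {
    String template = "The property {1} must be a JSON Array, not {0} (at {2})";
    // {0} = actual JSON type, {1} = property name, {2} = path
    String msg = MessageFormat.format(template, "an Object", "given", "Patient.name[0]");
    System.out.println(msg);
    // -> The property given must be a JSON Array, not an Object (at Patient.name[0])
  }
}
```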

View File

@@ -19,7 +19,7 @@
<properties>
<hapi_fhir_version>5.1.0</hapi_fhir_version>
<validator_test_case_version>1.1.56</validator_test_case_version>
<validator_test_case_version>1.1.57-SNAPSHOT</validator_test_case_version>
<junit_jupiter_version>5.6.2</junit_jupiter_version>
<maven_surefire_version>3.0.0-M4</maven_surefire_version>
<jacoco_version>0.8.5</jacoco_version>