Merge branch 'master' into do-20230602-refactor-cli-params

This commit is contained in:
dotasek 2023-06-14 10:55:48 -04:00
commit 1afcd224f7
38 changed files with 491 additions and 183 deletions

73
.github/workflows/codeql.yml vendored Normal file
View File

@ -0,0 +1,73 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

jobs:
  analyze:
    name: Analyze
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    permissions:
      actions: read
      contents: read
      security-events: write
    strategy:
      fail-fast: false
      matrix:
        language: [ 'java' ]
        module: [ 'org.hl7.fhir.utilities', 'org.hl7.fhir.dstu2', 'org.hl7.fhir.dstu2016may', 'org.hl7.fhir.dstu3', 'org.hl7.fhir.r4', 'org.hl7.fhir.r4b', 'org.hl7.fhir.r5', 'org.hl7.fhir.convertors', 'org.hl7.fhir.validation' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Use only 'java' to analyze code written in Java, Kotlin or both
        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
      # If this step fails, then you should remove it and run the build manually (see below).
      # - name: Autobuild
      #   uses: github/codeql-action/autobuild@v2

      # Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      # Autobuild is not used here; instead, each matrix module is built manually with Maven
      # so that CodeQL can observe the compilation.
      - run: |
          mvn install -DskipTests -pl ${{ matrix.module }}

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{matrix.language}}:${{ matrix.module }}"

35
.github/workflows/trivy.yml vendored Normal file
View File

@ -0,0 +1,35 @@
name: Trivy Security Scans
on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]
  workflow_dispatch:
jobs:
  build:
    name: build
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Run static analysis
        uses: aquasecurity/trivy-action@master
        with:
          scan-type: 'fs'
          vuln-type: 'library'
          scanners: 'vuln,secret,config'
          ignore-unfixed: true
          format: 'sarif'
          output: 'trivy-results.sarif'
          severity: 'MEDIUM,HIGH,CRITICAL'
      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v2
        with:
          sarif_file: 'trivy-results.sarif'
          category: 'code'

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -91,7 +91,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

View File

@ -29,8 +29,9 @@ public class PackageMinifier {
for (PackageResourceInformation pri : src.listIndexedResources()) {
if (min.isMinified(pri.getResourceType())) {
Resource res = new JsonParser().parse(src.load(pri));
min.minify(res);
tgt.addFile("package", res.fhirType()+"-"+res.getIdPart()+".json", new JsonParser().composeBytes(res), null);
if (min.minify(res)) {
tgt.addFile("package", res.fhirType()+"-"+res.getIdPart()+".json", new JsonParser().composeBytes(res), null);
}
}
}
tgt.save(new FileOutputStream(target));

View File

@ -91,7 +91,9 @@ public class ResourceDependencyPackageBuilder {
ResourceMinifier min = new ResourceMinifier();
if (min.isMinified(resource.fhirType())) {
resource = resource.copy();
min.minify(resource);
if (!min.minify(resource)) {
return;
}
} else {
return;
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -59,25 +59,16 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -73,25 +73,16 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -59,7 +59,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
@ -67,25 +66,16 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -71,7 +71,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>
@ -80,29 +79,21 @@
<version>${validator_test_case_version}</version>
<scope>test</scope>
</dependency>
<!-- Apache POI -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -65,7 +65,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>
@ -86,25 +85,16 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>

View File

@ -8,7 +8,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.xmlbeans.xml.stream.ReferenceResolver;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4b.context.IWorkerContext;
import org.hl7.fhir.r4b.model.Base;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -73,25 +73,16 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>${apache_poi_version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>ooxml-schemas</artifactId>
<version>1.4</version>
<artifactId>poi-ooxml-full</artifactId>
<optional>true</optional>
</dependency>
@ -116,7 +107,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>
<dependency>

View File

@ -123,16 +123,20 @@ public class R5ExtensionsLoader {
if (Utilities.existsInList(lsd.info.getId(), types)) {
StructureDefinition sd = lsd.getResource();
count++;
List<ElementDefinition> rl = new ArrayList<>();
for (ElementDefinition ed : sd.getDifferential().getElement()) {
if (!stripTypes(ed, sd, types)) {
System.out.println("A problem...");
rl.add(ed);
}
}
sd.getDifferential().getElement().removeAll(rl);
rl.clear();
for (ElementDefinition ed : sd.getSnapshot().getElement()) {
if (!stripTypes(ed, sd, types)) {
System.out.println("A problem...");
rl.add(ed);
}
}
sd.getSnapshot().getElement().removeAll(rl);
sd.setWebPath(Utilities.pathURL(lsd.source.getWebLocation(), sd.getId().toLowerCase()+".html"));
registerTerminologies(sd);
context.cacheResourceFromPackage(sd, new PackageInformation(lsd.source));

View File

@ -157,8 +157,7 @@ public class ProfilePathProcessor {
* @throws DefinitionException, FHIRException
* @throws Exception
*/
private ElementDefinition processPaths(
final ProfilePathProcessorState cursors) throws FHIRException {
private ElementDefinition processPaths(final ProfilePathProcessorState cursors) throws FHIRException {
debugProcessPathsEntry(cursors);
ElementDefinition res = null;
List<TypeSlice> typeList = new ArrayList<>();
@ -170,7 +169,6 @@ public class ProfilePathProcessor {
debugProcessPathsIteration(cursors, currentBasePath);
List<ElementDefinition> diffMatches = profileUtilities.getDiffMatches(getDifferential(), currentBasePath, cursors.diffCursor, getDiffLimit(), getProfileName()); // get a list of matching elements in scope
// in the simple case, source is not sliced.
if (!currentBase.hasSlicing() || currentBasePath.equals(getSlicing().getPath()))
{
@ -253,9 +251,8 @@ public class ProfilePathProcessor {
.incrementDebugIndent()
.withBaseLimit(newBaseLimit)
.withDiffLimit(newDiffLimit)
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0)).withSlicing(new PathSlicingParams(true, null, null)).
processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor,
cursors.contextName, cursors.resultPathBase));
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, 0)).withSlicing(new PathSlicingParams(true, null, null))
.processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
if (e == null)
throw new FHIRException(profileUtilities.getContext().formatMessage(I18nConstants.DID_NOT_FIND_SINGLE_SLICE_, diffMatches.get(0).getPath()));
e.setSlicing(diffMatches.get(0).getSlicing());
@ -267,9 +264,10 @@ public class ProfilePathProcessor {
outcome.setPath(profileUtilities.fixedPathDest(getContextPathTarget(), outcome.getPath(), getRedirector(), getContextPathSource()));
profileUtilities.updateFromBase(outcome, currentBase, getSourceStructureDefinition().getUrl());
if (!diffMatches.get(0).hasSlicing())
if (!diffMatches.get(0).hasSlicing()) {
outcome.setSlicing(profileUtilities.makeExtensionSlicing());
else {
outcome.setUserData("auto-added-slicing", true);
} else {
outcome.setSlicing(diffMatches.get(0).getSlicing().copy());
for (int i = 1; i < diffMatches.size(); i++) {
if (diffMatches.get(i).hasSlicing()) {
@ -348,7 +346,8 @@ public class ProfilePathProcessor {
.withBaseLimit(newBaseLimit)
.withDiffLimit(newDiffLimit)
.withProfileName(getProfileName() + profileUtilities.pathTail(diffMatches, i))
.withSlicing(new PathSlicingParams(true, slicerElement, null)).processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
.withSlicing(new PathSlicingParams(true, slicerElement, null))
.processPaths(new ProfilePathProcessorState(cursors.base, cursors.baseCursor, newDiffCursor, cursors.contextName, cursors.resultPathBase));
}
// ok, done with that - next in the base list
cursors.baseCursor = newBaseLimit + 1;

View File

@ -777,17 +777,21 @@ public class ProfileUtilities extends TranslatingUtilities {
int count = slice.checkMin();
boolean repeats = !"1".equals(slice.getFocus().getBase().getMax()); // type slicing if repeats = 1
if (count > -1 && repeats) {
String msg = "The slice definition for "+slice.getFocus().getId()+" has a minimum of "+slice.getFocus().getMin()+" but the slices add up to a minimum of "+count;
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+ed.getId(), msg, forPublication ? ValidationMessage.IssueSeverity.ERROR : ValidationMessage.IssueSeverity.INFORMATION));
if (slice.getFocus().hasUserData("auto-added-slicing")) {
slice.getFocus().setMin(count);
} else {
String msg = "The slice definition for "+slice.getFocus().getId()+" has a minimum of "+slice.getFocus().getMin()+" but the slices add up to a minimum of "+count;
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+slice.getFocus().getId(), msg, forPublication ? ValidationMessage.IssueSeverity.ERROR : ValidationMessage.IssueSeverity.INFORMATION));
}
}
count = slice.checkMax();
if (count > -1 && repeats) {
String msg = "The slice definition for "+slice.getFocus().getId()+" has a maximum of "+slice.getFocus().getMax()+" but the slices add up to a maximum of "+count+". Check that this is what is intended";
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+ed.getId(), msg, ValidationMessage.IssueSeverity.INFORMATION));
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+slice.getFocus().getId(), msg, ValidationMessage.IssueSeverity.INFORMATION));
}
if (!slice.checkMinMax()) {
String msg = "The slice definition for "+slice.getFocus().getId()+" has a maximum of "+slice.getFocus().getMax()+" which is less than the minimum of "+slice.getFocus().getMin();
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+ed.getId(), msg, ValidationMessage.IssueSeverity.WARNING));
messages.add(new ValidationMessage(Source.ProfileValidator, ValidationMessage.IssueType.VALUE, url+"#"+slice.getFocus().getId(), msg, ValidationMessage.IssueSeverity.WARNING));
}
slices.remove(s);
}

View File

@ -12,6 +12,8 @@ import org.hl7.fhir.r5.context.IWorkerContext;
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionDesignationComponent;
import org.hl7.fhir.r5.model.CodeSystem.ConceptPropertyComponent;
import org.hl7.fhir.r5.model.DataType;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.terminologies.CodeSystemUtilities;
import org.hl7.fhir.utilities.TextFile;
@ -36,7 +38,7 @@ import org.hl7.fhir.utilities.i18n.LanguageFileProducer.TranslationUnit;
*/
public class LanguageUtils {
public static final List<String> TRANSLATION_SUPPLEMENT_RESOURCE_TYPES = Arrays.asList("CodeSystem", "StructureDefinition");
public static final List<String> TRANSLATION_SUPPLEMENT_RESOURCE_TYPES = Arrays.asList("CodeSystem", "StructureDefinition", "Questionnaire");
private static final String ORPHAN_TRANSLATIONS_NAME = "translations.orphans";
@ -64,7 +66,7 @@ public class LanguageUtils {
if (translation == null) {
translation = element.getTranslation(langSession.getTargetLang());
}
langSession.entry(new TextUnit(pathForElement(element), base, translation));
langSession.entry(new TextUnit(pathForElement(element), contextForElement(element), base, translation));
}
}
for (Element c: element.getChildren()) {
@ -75,6 +77,10 @@ public class LanguageUtils {
}
private String contextForElement(Element element) {
throw new Error("Not done yet");
}
private String getSpecialTranslation(Element parent, Element element, String targetLang) {
if (parent == null) {
return null;
@ -188,7 +194,7 @@ public class LanguageUtils {
private Set<TranslationUnit> findTranslations(String path, String src, Set<TranslationUnit> translations) {
Set<TranslationUnit> res = new HashSet<>();
for (TranslationUnit translation : translations) {
if (path.equals(translation.getContext()) && src.equals(translation.getSrcText())) {
if (path.equals(translation.getId()) && src.equals(translation.getSrcText())) {
res.add(translation);
}
}
@ -202,7 +208,7 @@ public class LanguageUtils {
public static void fillSupplement(CodeSystem cs, List<TranslationUnit> list) {
cs.setUserData(SUPPLEMENT_NAME, "true");
for (TranslationUnit tu : list) {
ConceptDefinitionComponent cd = CodeSystemUtilities.getCode(cs, tu.getContext());
ConceptDefinitionComponent cd = CodeSystemUtilities.getCode(cs, tu.getId());
if (cd != null && cd.hasDisplay() && cd.getDisplay().equals(tu.getSrcText())) {
cd.addDesignation().setLanguage(tu.getLanguage()).setValue(tu.getTgtText());
} else {
@ -273,9 +279,18 @@ public class LanguageUtils {
target = d.getValue();
}
}
list.add(new TranslationUnit(lang, code, display, target));
list.add(new TranslationUnit(lang, code, getDefinition(cd), display, target));
for (ConceptDefinitionComponent cd1 : cd.getConcept()) {
generateTranslations(list, cd1, lang);
}
}
private static String getDefinition(ConceptDefinitionComponent cd) {
ConceptPropertyComponent v = CodeSystemUtilities.getProperty(cd, "translation-context");
if (v != null && v.hasValue()) {
return v.getValue().primitiveValue();
} else {
return cd.getDefinition();
}
}
}
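fillSupplement, shown above, matches each TranslationUnit id against a concept code in the supplement; when the source text equals that concept's display, the target text is added as a designation in the translation language. A minimal sketch of a call, assuming LanguageUtils sits in the org.hl7.fhir.r5.elementmodel package (the code system and values below are made up for illustration):

import java.util.ArrayList;
import java.util.List;

import org.hl7.fhir.r5.elementmodel.LanguageUtils; // assumed package location
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.utilities.i18n.LanguageFileProducer.TranslationUnit;

public class FillSupplementExample {
  public static void main(String[] args) {
    // A made-up supplement that already carries the concept being translated.
    CodeSystem supplement = new CodeSystem();
    supplement.addConcept().setCode("active").setDisplay("Active");

    List<TranslationUnit> units = new ArrayList<>();
    // arguments: language, id (concept code), context, source text, target text
    units.add(new TranslationUnit("de", "active", null, "Active", "Aktiv"));

    // Adds a German designation "Aktiv" to the "active" concept.
    LanguageUtils.fillSupplement(supplement, units);
  }
}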

View File

@ -129,7 +129,7 @@ public abstract class ParserBase {
if (res == null) {
throw new FHIRException("Parsing FHIR content failed: "+errors.get(0).summary());
} else if (res.size() == 0) {
throw new FHIRException("Parsing FHIR content returned no elements in a context where one element is required");
throw new FHIRException("Parsing FHIR content returned no elements in a context where one element is required because: "+errors.get(0).summary());
}
if (res.size() != 1) {
throw new FHIRException("Parsing FHIR content returned multiple elements in a context where only one element is allowed");

View File

@ -665,16 +665,16 @@ public class ValueSetValidator {
if (code.getDisplay() == null) {
return new ValidationResult(code.getSystem(), cs.getVersion(), cc, vc.getDisplay());
}
CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder();
CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder(", ", " or ");
if (cc.hasDisplay() && isOkLanguage(cs.getLanguage())) {
b.append(cc.getDisplay());
b.append("'"+cc.getDisplay()+"'");
if (code.getDisplay().equalsIgnoreCase(cc.getDisplay())) {
return new ValidationResult(code.getSystem(), cs.getVersion(), cc, getPreferredDisplay(cc, cs));
}
}
for (ConceptDefinitionDesignationComponent ds : cc.getDesignation()) {
if (isOkLanguage(ds.getLanguage())) {
b.append(ds.getValue());
b.append("'"+ds.getValue()+"'");
if (code.getDisplay().equalsIgnoreCase(ds.getValue())) {
return new ValidationResult(code.getSystem(),cs.getVersion(), cc, getPreferredDisplay(cc, cs));
}
@ -685,14 +685,14 @@ public class ValueSetValidator {
ConceptReferencePair vs = findValueSetRef(code.getSystem(), code.getCode());
if (vs != null && (vs.getCc().hasDisplay() ||vs.getCc().hasDesignation())) {
if (vs.getCc().hasDisplay() && isOkLanguage(vs.getValueset().getLanguage())) {
b.append(vs.getCc().getDisplay());
b.append("'"+vs.getCc().getDisplay()+"'");
if (code.getDisplay().equalsIgnoreCase(vs.getCc().getDisplay())) {
return new ValidationResult(code.getSystem(), cs.getVersion(), cc, getPreferredDisplay(cc, cs));
}
}
for (ConceptReferenceDesignationComponent ds : vs.getCc().getDesignation()) {
if (isOkLanguage(ds.getLanguage())) {
b.append(ds.getValue());
b.append("'"+ds.getValue()+"'");
if (code.getDisplay().equalsIgnoreCase(ds.getValue())) {
return new ValidationResult(code.getSystem(), cs.getVersion(), cc, getPreferredDisplay(cc, cs));
}

View File

@ -0,0 +1,45 @@
package org.hl7.fhir.r5.utils;
import org.hl7.fhir.r5.model.Base;
import org.hl7.fhir.r5.model.Element;
import org.hl7.fhir.r5.model.Property;
import org.hl7.fhir.r5.model.Resource;
public class ElementVisitor {
public interface IElementVisitor {
public void visit(Resource resource);
public void visit(Element element);
}
private IElementVisitor visitor;
public ElementVisitor(IElementVisitor visitor) {
this.visitor = visitor;
}
private void visitBase(Base base) {
for (Property p : base.children()) {
if (p.hasValues()) {
for (Base b : p.getValues()) {
if (b instanceof Resource) {
visit((Resource) b);
} else {
visit((Element) b);
}
}
}
}
}
public void visit(Resource res) {
visitor.visit(res);
visitBase(res);
}
public void visit(Element e) {
visitor.visit(e);
visitBase(e);
}
}
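The new ElementVisitor walks a resource tree and calls the supplied IElementVisitor back for every resource and element it contains. As a rough illustration of the wiring (the ExtensionStripper class below is hypothetical, not part of this commit), a visitor that removes every extension could look like this:

import org.hl7.fhir.r5.model.DomainResource;
import org.hl7.fhir.r5.model.Element;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.utils.ElementVisitor;
import org.hl7.fhir.r5.utils.ElementVisitor.IElementVisitor;

public class ExtensionStripper implements IElementVisitor {

  @Override
  public void visit(Resource resource) {
    if (resource instanceof DomainResource) {
      ((DomainResource) resource).getExtension().clear();
    }
  }

  @Override
  public void visit(Element element) {
    element.getExtension().clear();
  }

  public static void strip(Resource resource) {
    // visits the resource itself, then recurses into child elements and contained resources
    new ElementVisitor(new ExtensionStripper()).visit(resource);
  }
}

TxTesterScrubbers, further down in this commit, applies the same pattern but only removes extensions that are not in its managed list.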

View File

@ -77,7 +77,7 @@ public class ResourceLanguageFileBuilder {
String ppath = path+"."+p.getName()+(p.isList() ? "["+i+"]" : "");
i++;
if (isTranslatable(p, b, pid)) {
sess.entry(new TextUnit(ppath, b.primitiveValue(), getTranslation(b, target)));
sess.entry(new TextUnit(ppath, getContext(), b.primitiveValue(), getTranslation(b, target)));
}
for (Property pp : b.children()) {
process(sess, pp, pid, ppath);
@ -86,6 +86,11 @@ public class ResourceLanguageFileBuilder {
}
}
private String getContext() {
throw new Error("not done yet");
}
private boolean isTranslatable(Property p, Base b, String id) {
if (new ContextUtilities(context).isPrimitiveDatatype(b.fhirType())) { // never any translations for non-primitives
ElementDefinition ed = null;

View File

@ -32,7 +32,9 @@ import org.hl7.fhir.r5.model.OperationDefinition.OperationDefinitionParameterCom
import org.hl7.fhir.r5.model.Questionnaire;
import org.hl7.fhir.r5.model.Questionnaire.QuestionnaireItemComponent;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.SearchParameter;
import org.hl7.fhir.r5.model.StructureDefinition;
import org.hl7.fhir.r5.model.StructureDefinition.StructureDefinitionKind;
import org.hl7.fhir.r5.model.ValueSet;
import org.hl7.fhir.utilities.Utilities;
@ -46,33 +48,32 @@ public class ResourceMinifier {
"ConceptMap", "NamingSystem", "OperationDefinition", "SearchParameter", "Questionnaire");
}
public void minify(Resource res) {
public boolean minify(Resource res) {
if (res instanceof StructureDefinition) {
minifySD((StructureDefinition) res);
}
if (res instanceof ValueSet) {
return minifySD((StructureDefinition) res);
} else if (res instanceof ValueSet) {
minifyVS((ValueSet) res);
}
if (res instanceof CodeSystem) {
} else if (res instanceof CodeSystem) {
minifyCS((CodeSystem) res);
}
if (res instanceof CapabilityStatement) {
} else if (res instanceof CapabilityStatement) {
minifyCS((CapabilityStatement) res);
}
if (res instanceof ConceptMap) {
} else if (res instanceof ConceptMap) {
minifyCM((ConceptMap) res);
}
if (res instanceof NamingSystem) {
} else if (res instanceof NamingSystem) {
minifyNS((NamingSystem) res);
}
if (res instanceof OperationDefinition) {
} else if (res instanceof OperationDefinition) {
minifyOD((OperationDefinition) res);
}
if (res instanceof Questionnaire) {
} else if (res instanceof Questionnaire) {
minifyQ((Questionnaire) res);
} else if (res instanceof SearchParameter) {
minifySP((SearchParameter) res);
}
return true;
}
private void minifySP(SearchParameter sp) {
minCR(sp);
// nothing
}
private void minifyQ(Questionnaire q) {
@ -242,14 +243,20 @@ public class ResourceMinifier {
// can't remove anything else
}
private void minifySD(StructureDefinition sd) {
private boolean minifySD(StructureDefinition sd) {
if (sd.getKind() == StructureDefinitionKind.LOGICAL) {
return false;
}
minCR(sd);
sd.setKeyword(null);
sd.setMapping(null);
sd.setSnapshot(null);
sd.setMapping(null);
if (sd.hasDifferential()) {
sd.setSnapshot(null);
}
for (ElementDefinition ed : sd.getDifferential().getElement()) {
minifyED(ed);
}
return true;
}
private void minifyED(ElementDefinition ed) {
@ -272,6 +279,7 @@ public class ResourceMinifier {
abn.setShortDoco(null);
}
ed.setMapping(null);
ed.setMustSupportElement(null);
}
private void minCR(CanonicalResource cr) {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -49,7 +49,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

View File

@ -74,7 +74,10 @@ public class JsonLangFileProducer extends LanguageFileProducer {
public void entry(TextUnit unit) {
JsonObject entry = new JsonObject();
json.forceArray("entries").add(entry);
entry.add("context", unit.getContext());
entry.add("id", unit.getId());
if (unit.getContext1() != null) {
entry.add("context", unit.getContext1());
}
entry.add("source", unit.getSrcText());
entry.add("target", unit.getTgtText());
}
@ -88,7 +91,7 @@ public class JsonLangFileProducer extends LanguageFileProducer {
JsonObject json = JsonParser.parseObject(source);
for (JsonObject lang : json.forceArray("languages").asJsonObjects()) {
for (JsonObject entry : lang.forceArray("entries").asJsonObjects()) {
list.add(new TranslationUnit(lang.asString("targetLang"), entry.asString("context"), entry.asString("source"), entry.asString("target")));
list.add(new TranslationUnit(lang.asString("targetLang"), entry.asString("id"), entry.asString("context"), entry.asString("source"), entry.asString("target")));
}
}
return list;
@ -118,7 +121,10 @@ public class JsonLangFileProducer extends LanguageFileProducer {
for (TranslationUnit tu : translations) {
JsonObject entry = new JsonObject();
lj.forceArray("entries").add(entry);
entry.add("context", tu.getContext());
entry.add("id", tu.getId());
if (tu.getContext1() != null) {
entry.add("context", tu.getContext1());
}
entry.add("source", tu.getSrcText());
entry.add("target", tu.getTgtText());
}

View File

@ -18,35 +18,59 @@ import java.util.HashMap;
public abstract class LanguageFileProducer {
public static class TextUnit {
protected String id;
protected String context;
protected String srcText;
protected String tgtText;
public TextUnit(String context, String srcText, String tgtText) {
public TextUnit(String id, String context, String srcText, String tgtText) {
super();
this.id = id;
this.context = context;
this.srcText = srcText;
this.tgtText = tgtText;
}
public String getContext() {
/**
* The identity of the item being translated
*
* @return
*/
public String getId() {
return id;
}
/**
* Additional text that helps establish the context for the translation
* @return
*/
public String getContext1() {
return context;
}
/**
* The text in the language that's being translated from
*
* @return
*/
public String getSrcText() {
return srcText;
}
/**
* The text in the language that's being translated to
*
* @return
*/
public String getTgtText() {
return tgtText;
}
}
public static class TranslationUnit extends TextUnit {
private String language;
public TranslationUnit(String language, String context, String srcText, String tgtText) {
super(context, srcText, tgtText);
public TranslationUnit(String language, String id, String context, String srcText, String tgtText) {
super(id, context, srcText, tgtText);
this.language = language;
}
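With this refactoring, the identity of a translatable item (id) and its optional descriptive context are carried separately, and the constructors take them as distinct arguments. A small sketch of the new call shapes (the element path and strings are illustrative only):

import org.hl7.fhir.utilities.i18n.LanguageFileProducer.TextUnit;
import org.hl7.fhir.utilities.i18n.LanguageFileProducer.TranslationUnit;

public class TranslationUnitExample {
  public static void main(String[] args) {
    // id identifies the element; context carries extra wording for the translator
    TextUnit unit = new TextUnit(
        "Patient.name[0].text", "Human-readable name of the patient", "Name", null);

    TranslationUnit tu = new TranslationUnit(
        "de", "Patient.name[0].text", "Human-readable name of the patient", "Name", "Name");

    System.out.println(tu.getId() + " -> " + tu.getTgtText());
  }
}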

View File

@ -81,10 +81,10 @@ public class PoGetTextProducer extends LanguageFileProducer {
@Override
public void entry(TextUnit unit) {
ln("#: "+unit.getContext());
// if (context != null) {
// ln("#. "+context);
// }
ln("#: "+unit.getId());
if (unit.getContext1() != null) {
ln("#. "+unit.getContext1());
}
ln("msgid \""+unit.getSrcText()+"\"");
ln("msgstr \""+(unit.getTgtText() == null ? "" : unit.getTgtText())+"\"");
ln("");
@ -117,7 +117,11 @@ public class PoGetTextProducer extends LanguageFileProducer {
lang = p[1].trim();
}
} else if (s.startsWith("#:")) {
tu = new TranslationUnit(lang, s.substring(2).trim(), null, null);
tu = new TranslationUnit(lang, s.substring(2).trim(), null, null, null);
} else if (s.startsWith("#.")) {
if (tu != null) {
tu.setContext(s.substring(2).trim());
}
} else {
throw new IOException("Encountered unexpected line '"+s+"'");
}
@ -166,10 +170,10 @@ public class PoGetTextProducer extends LanguageFileProducer {
ln(po, "# "+baseLang+" -> "+targetLang);
ln(po, "");
for (TranslationUnit tu : translations) {
ln(po, "#: "+tu.getContext());
// if (context != null) {
// ln("#. "+context);
// }
ln(po, "#: "+tu.getId());
if (tu.getContext1() != null) {
ln(po, "#. "+tu.getContext1());
}
ln(po, "msgid \""+tu.getSrcText()+"\"");
ln(po, "msgstr \""+(tu.getTgtText() == null ? "" : tu.getTgtText())+"\"");
ln(po, "");

View File

@ -50,12 +50,12 @@ public class XLIFFProducer extends LanguageFileProducer {
@Override
public void entry(TextUnit unit) {
i++;
ln(" <trans-unit id=\""+id+"\" resname=\""+unit.getContext()+"\">");
// if (context != null) {
// ln(" <notes>");
// ln(" <note id=\"n"+i+"\">"+Utilities.escapeXml(context)+"</note>");
// ln(" </notes>");
// }
ln(" <trans-unit id=\""+id+"\" resname=\""+unit.getId()+"\">");
if (unit.getContext1() != null) {
ln(" <notes>");
ln(" <note id=\"n"+i+"\">"+Utilities.escapeXml(unit.getContext1())+"</note>");
ln(" </notes>");
}
ln(" <source>"+Utilities.escapeXml(unit.getSrcText())+"</source>");
ln(" <target>"+Utilities.escapeXml(unit.getTgtText())+"</target>");
ln(" </trans-unit>");
@ -114,7 +114,9 @@ public class XLIFFProducer extends LanguageFileProducer {
for (Element file : XMLUtil.getNamedChildren(xliff, "file")) {
Element body = XMLUtil.getNamedChild(file, "body");
for (Element transUnit : XMLUtil.getNamedChildren(body, "trans-unit")) {
TranslationUnit tu = new TranslationUnit(file.getAttribute("target-language"), transUnit.getAttribute("id"),
Element notes = XMLUtil.getNamedChild(transUnit, "notes");
TranslationUnit tu = new TranslationUnit(file.getAttribute("target-language"), transUnit.getAttribute("resname"),
notes == null ? null : XMLUtil.getNamedChildText(notes, "note"),
XMLUtil.getNamedChildText(transUnit, "source"), XMLUtil.getNamedChildText(transUnit, "target"));
if (!Utilities.noString(tu.getSrcText()) && !Utilities.noString(tu.getTgtText())) {
list.add(tu);
@ -149,7 +151,14 @@ public class XLIFFProducer extends LanguageFileProducer {
ln(xml, " <file source-language=\""+baseLang+"\" target-language=\""+targetLang+"\" id=\""+id+"\" original=\"Resource "+id+"\" datatype=\"KEYVALUEJSON\">");
ln(xml, " <body>");
for (TranslationUnit tu : translations) {
ln(xml, " <trans-unit id=\""+id+"\" resname=\""+tu.getContext()+"\">");
int i = 0;
ln(xml, " <trans-unit id=\""+id+"\" resname=\""+tu.getId()+"\">");
if (tu.getContext1() != null) {
i++;
ln(xml, " <notes>");
ln(xml, " <note id=\"n"+i+"\">"+Utilities.escapeXml(tu.getContext1())+"</note>");
ln(xml, " </notes>");
}
ln(xml, " <source>"+Utilities.escapeXml(tu.getSrcText())+"</source>");
ln(xml, " <target>"+Utilities.escapeXml(tu.getTgtText())+"</target>");
ln(xml, " </trans-unit>");

View File

@ -389,5 +389,9 @@ public class JsonObject extends JsonElement {
this.extraComma = extraComma;
}
public void clear() {
properties.clear();
propMap.clear();
}
}

View File

@ -52,6 +52,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@ -60,6 +61,7 @@ import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipParameters;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.utilities.CommaSeparatedStringBuilder;
import org.hl7.fhir.utilities.SimpleHTTPClient;
@ -946,7 +948,9 @@ public class NpmPackage {
OutputStream = new ByteArrayOutputStream();
bufferedOutputStream = new BufferedOutputStream(OutputStream);
gzipOutputStream = new GzipCompressorOutputStream(bufferedOutputStream);
GzipParameters gp = new GzipParameters();
gp.setCompressionLevel(Deflater.BEST_COMPRESSION);
gzipOutputStream = new GzipCompressorOutputStream(stream, gp);
tar = new TarArchiveOutputStream(gzipOutputStream);
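The change above configures the gzip layer of the generated package tarball for best compression via GzipParameters before handing it to the tar writer. A standalone sketch of that pattern with commons-compress (variable names are illustrative, not the method's actual locals):

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.util.zip.Deflater;

import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipParameters;

public class TgzCompressionExample {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    BufferedOutputStream buffered = new BufferedOutputStream(bytes);

    // Ask the gzip layer for the strongest (and slowest) compression level.
    GzipParameters gp = new GzipParameters();
    gp.setCompressionLevel(Deflater.BEST_COMPRESSION);

    try (TarArchiveOutputStream tar =
             new TarArchiveOutputStream(new GzipCompressorOutputStream(buffered, gp))) {
      // ... add tar entries here, as NpmPackage does for each package file ...
      tar.finish();
    }
    System.out.println("compressed size: " + bytes.size() + " bytes");
  }
}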

View File

@ -463,8 +463,8 @@ Version_mismatch_The_context_has_version__loaded_and_the_new_content_being_loade
Error_reading__from_package__ = Error reading {0} from package {1}#{2}: {3}
Error_parsing_ = Error parsing {0}:{1}
Unable_to_connect_to_terminology_server_Use_parameter_tx_na_tun_run_without_using_terminology_services_to_validate_LOINC_SNOMED_ICDX_etc_Error__ = Unable to connect to terminology server. Use parameter ''-tx n/a'' to run without using terminology services to validate LOINC, SNOMED, ICD-X etc. Error = {0}
Display_Name_for__should_be_one_of__instead_of_one = Wrong Display Name ''{4}'' for {1}#{2} - should be ''{3}'' (for the language(s) ''{5}'')
Display_Name_for__should_be_one_of__instead_of_other = Wrong Display Name ''{4}'' for {1}#{2} - should be one of {0} choices: ''{3}'' for the language(s) ''{5}''
Display_Name_for__should_be_one_of__instead_of_one = Wrong Display Name ''{4}'' for {1}#{2} - should be {3} (for the language(s) ''{5}'')
Display_Name_for__should_be_one_of__instead_of_other = Wrong Display Name ''{4}'' for {1}#{2} - should be one of {0} choices: {3} (for the language(s) ''{5}'')
Unknown_Code__in_ = Unknown Code ''{0}'' in the system ''{1}''
UNKNOWN_CODE__IN_FRAGMENT = Unknown Code ''{0}'' in the system ''{1}'' - note that the code system is labeled as a fragment, so the code may be valid in some other fragment
Code_found_in_expansion_however_ = Code found in expansion, however: {0}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -204,7 +204,6 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
<optional>false</optional>
<scope>compile</scope>
</dependency>
@ -212,7 +211,13 @@
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<optional>false</optional>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-full</artifactId>
<optional>false</optional>
<scope>compile</scope>
</dependency>
@ -275,7 +280,6 @@
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
</dependency>
<dependency>
<groupId>com.atlassian.commonmark</groupId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -14,6 +14,7 @@
<properties>
<checkstyle_config_location>${project.parent.basedir}</checkstyle_config_location>
<info_cqframework_version>1.5.12</info_cqframework_version>
</properties>
<dependencies>
@ -132,6 +133,13 @@
<groupId>info.cqframework</groupId>
<artifactId>model</artifactId>
<version>${info_cqframework_version}</version>
<exclusions>
<!-- exclude this in favor of 1.9.4 for security reasons -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>info.cqframework</groupId>
@ -153,12 +161,15 @@
<artifactId>qdm</artifactId>
<version>${info_cqframework_version}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.4</version>
</dependency>
<!-- OkHttpDependency -->
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<optional>true</optional>
</dependency>

View File

@ -42,7 +42,7 @@ public class TxTestsTask extends StandaloneTask{
final String version = Params.getParam(args, Params.VERSION);
final String tx = Params.getParam(args, Params.TERMINOLOGY);
final String filter = Params.getParam(args, Params.FILTER);
boolean ok = new TxTester(new TxTester.InternalTxLoader(source, output), tx).setOutput(output).execute(version, filter);
boolean ok = new TxTester(new TxTester.InternalTxLoader(source, output), tx, false).setOutput(output).execute(version, filter);
System.exit(ok ? 1 : 0);
}
}

View File

@ -58,16 +58,18 @@ public class TxTester {
private String error;
private String output;
private ITerminologyClient tx;
private boolean tight;
public TxTester(ITxTesterLoader loader, String server) {
public TxTester(ITxTesterLoader loader, String server, boolean tight) {
super();
this.server = server;
this.loader = loader;
this.tight = tight;
}
public static void main(String[] args) throws Exception {
new TxTester(new InternalTxLoader(args[0]), args[1]).execute(args[2], args[3]);
new TxTester(new InternalTxLoader(args[0]), args[1], "true".equals(args[2])).execute(args[2], args[3]);
}
public boolean execute(String version, String filter) throws IOException, URISyntaxException {
@ -239,12 +241,12 @@ public class TxTester {
String vsj;
try {
ValueSet vs = tx.expandValueset(null, p, null);
TxTesterScrubbers.scrub(vs);
TxTesterScrubbers.scrubVS(vs, tight);
TxTesterSorters.sortValueSet(vs);
vsj = new org.hl7.fhir.r5.formats.JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(vs);
} catch (EFhirClientException e) {
OperationOutcome oo = e.getServerErrors().get(0);
TxTesterScrubbers.scrub(oo);
TxTesterScrubbers.scrubOO(oo, tight);
vsj = new org.hl7.fhir.r5.formats.JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(oo);
}
String diff = CompareUtilities.checkJsonSrcIsSame(resp, vsj);
@ -263,7 +265,7 @@ public class TxTester {
String pj;
try {
Parameters po = tx.validateVS(p);
TxTesterScrubbers.scrub(po);
TxTesterScrubbers.scrubParams(po);
TxTesterSorters.sortParameters(po);
pj = new org.hl7.fhir.r5.formats.JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(po);
} catch (EFhirClientException e) {

View File

@ -1,20 +1,85 @@
package org.hl7.fhir.validation.special;
import org.hl7.fhir.r5.model.DomainResource;
import org.hl7.fhir.r5.model.Element;
import org.hl7.fhir.r5.model.Extension;
import org.hl7.fhir.r5.model.OperationOutcome;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.ValueSet;
import org.hl7.fhir.r5.utils.ElementVisitor;
import org.hl7.fhir.r5.utils.ElementVisitor.IElementVisitor;
import org.hl7.fhir.utilities.Utilities;
public class TxTesterScrubbers {
public static void scrub(DomainResource dr) {
dr.setText(null);
dr.setMeta(null);
public static class TxTesterScrubberVisitor implements IElementVisitor {
private boolean tight;
protected TxTesterScrubberVisitor(boolean tight) {
super();
this.tight = tight;
}
private boolean isManagedExtension(Extension extension) {
return !tight || !Utilities.isAbsoluteUrl(extension.getUrl()) || Utilities.existsInList(extension.getUrl(),
"http://hl7.org/fhir/StructureDefinition/codesystem-alternate",
"http://hl7.org/fhir/StructureDefinition/codesystem-conceptOrder",
"http://hl7.org/fhir/StructureDefinition/codesystem-label",
"http://hl7.org/fhir/StructureDefinition/coding-sctdescid",
"http://hl7.org/fhir/StructureDefinition/itemWeight",
"http://hl7.org/fhir/StructureDefinition/rendering-style",
"http://hl7.org/fhir/StructureDefinition/rendering-xhtml",
"http://hl7.org/fhir/StructureDefinition/translation",
"http://hl7.org/fhir/StructureDefinition/valueset-concept-definition",
"http://hl7.org/fhir/StructureDefinition/valueset-conceptOrder",
"http://hl7.org/fhir/StructureDefinition/valueset-deprecated",
"http://hl7.org/fhir/StructureDefinition/valueset-label",
"http://hl7.org/fhir/StructureDefinition/valueset-supplement",
"http://hl7.org/fhir/test/CodeSystem/de-multi",
"http://hl7.org/fhir/test/CodeSystem/en-multi",
"http://hl7.org/fhir/test/StructureDefinition/unknown-extension-1",
"http://hl7.org/fhir/test/StructureDefinition/unknown-extension-3",
"http://hl7.org/fhir/test/StructureDefinition/unknown-extension-4",
"http://hl7.org/fhir/test/StructureDefinition/unknown-extension-5",
"http://hl7.org/fhir/test/ValueSet/extensions-bad-supplement",
"http://hl7.org/fhir/test/ValueSet/simple-all",
"http://hl7.org/fhir/test/ValueSet/simple-enumerated",
"http://hl7.org/fhir/test/ValueSet/simple-filter-isa");
}
@Override
public void visit(Resource resource) {
if (resource instanceof DomainResource) {
DomainResource dr = (DomainResource) resource;
dr.getExtension().removeIf(ext -> !isManagedExtension(ext));
}
}
@Override
public void visit(Element element) {
element.getExtension().removeIf(ext -> !isManagedExtension(ext));
}
}
public static void scrubDR(DomainResource dr, boolean tight) {
dr.setText(null);
dr.setMeta(null);
new ElementVisitor(new TxTesterScrubberVisitor(tight)).visit(dr);
}
public static void scrub(Parameters po) {
public static void scrubVS(ValueSet vs, boolean tight) {
scrubDR(vs, tight);
}
public static void scrubParams(Parameters po) {
po.setMeta(null);
}
public static void scrubOO(OperationOutcome po, boolean tight) {
scrubDR(po, tight);
}
}

View File

@ -93,7 +93,7 @@ public class ExternalTerminologyServiceTests implements ITxTesterLoader {
public void test() throws Exception {
if (SERVER != null) {
if (tester == null) {
tester = new TxTester(this, SERVER);
tester = new TxTester(this, SERVER, true);
}
String err = tester.executeTest(setup.suite, setup.test);
Assertions.assertTrue(err == null, err);

View File

@ -161,7 +161,7 @@ public class TerminologyServiceTests {
removeParameter(vse.getValueset(), "excludeNested");
}
TxTesterSorters.sortValueSet(vse.getValueset());
TxTesterScrubbers.scrub(vse.getValueset());
TxTesterScrubbers.scrubVS(vse.getValueset(), false);
String vsj = new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(vse.getValueset());
String diff = CompareUtilities.checkJsonSrcIsSame(resp, vsj);
if (diff != null) {
@ -205,7 +205,7 @@ public class TerminologyServiceTests {
}
e.getDetails().setText(vse.getError());
oo.addIssue(e);
TxTesterScrubbers.scrub(oo);
TxTesterScrubbers.scrubOO(oo, false);
String ooj = new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(oo);
String diff = CompareUtilities.checkJsonSrcIsSame(resp, ooj);
@ -291,7 +291,7 @@ public class TerminologyServiceTests {
res.addParameter().setName("issues").setResource(oo);
}
TxTesterSorters.sortParameters(res);
TxTesterScrubbers.scrub(res);
TxTesterScrubbers.scrubParams(res);
String pj = new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(res);
String diff = CompareUtilities.checkJsonSrcIsSame(resp, pj);

53
pom.xml
View File

@ -14,23 +14,22 @@
HAPI FHIR
-->
<artifactId>org.hl7.fhir.core</artifactId>
<version>6.0.12-SNAPSHOT</version>
<version>6.0.16-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
<hapi_fhir_version>6.4.1</hapi_fhir_version>
<validator_test_case_version>1.3.8</validator_test_case_version>
<validator_test_case_version>1.3.10-SNAPSHOT</validator_test_case_version>
<jackson_version>2.14.0</jackson_version>
<junit_jupiter_version>5.9.2</junit_jupiter_version>
<junit_platform_launcher_version>1.8.2</junit_platform_launcher_version>
<maven_surefire_version>3.0.0-M5</maven_surefire_version>
<maven_clean_version>3.1.0</maven_clean_version>
<okhttp.version>4.9.3</okhttp.version>
<okhttp.version>4.10.0</okhttp.version>
<jacoco_version>0.8.9</jacoco_version>
<info_cqframework_version>1.5.12</info_cqframework_version>
<lombok_version>1.18.22</lombok_version>
<byte_buddy_version>1.12.14</byte_buddy_version>
<apache_poi_version>4.1.1</apache_poi_version>
<apache_poi_version>5.2.1</apache_poi_version>
<saxon_he_version>9.8.0-15</saxon_he_version>
<maven.compiler.release>11</maven.compiler.release>
<maven.compiler.source>11</maven.compiler.source>
@ -164,7 +163,51 @@
<groupId>net.sf.saxon</groupId>
<artifactId>Saxon-HE</artifactId>
<version>${saxon_he_version}</version>
</dependency>
<!-- Apache POI -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${apache_poi_version}</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${apache_poi_version}</version>
<exclusions>
<!-- exclude this because it collides with the needed poi-ooxml-full -->
<exclusion>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-lite</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- include this to replace poi-ooxml-lite in poi-ooxml -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-full</artifactId>
<version>${apache_poi_version}</version>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
<exclusions>
<!-- Exclude this because the older jetbrains artifact contains an insecure pom definition -->
<exclusion>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Included because okhttp3 used a vulnerable version of this artifact -->
<dependency>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
<version>16.0.1</version>
</dependency>
</dependencies>
</dependencyManagement>