Merge pull request #1125 from hapifhir/gg-202302-markdown
Gg 202302 markdown
commit bbd9c6b018
@@ -43,6 +43,8 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
  private XVerExtensionManager xverManager;
  private Map<String, String> oidCache = new HashMap<>();
  private List<StructureDefinition> allStructuresList = new ArrayList<StructureDefinition>();
  private List<String> canonicalResourceNames;
  private List<String> concreteResourceNames;

  public ContextUtilities(IWorkerContext context) {
    super();

@@ -197,13 +199,17 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
   * @return a list of the resource names that are canonical resources defined for this version
   */
  public List<String> getCanonicalResourceNames() {
    List<String> names = new ArrayList<>();
    if (canonicalResourceNames == null) {
      canonicalResourceNames = new ArrayList<>();
      Set<String> names = new HashSet<>();
      for (StructureDefinition sd : allStructures()) {
        if (sd.getKind() == StructureDefinitionKind.RESOURCE && !sd.getAbstract() && hasUrlProperty(sd)) {
          names.add(sd.getType());
        }
      }
      return names;
      canonicalResourceNames.addAll(Utilities.sorted(names));
    }
    return canonicalResourceNames;
  }

  /**

@@ -370,5 +376,19 @@ public class ContextUtilities implements ProfileKnowledgeProvider {
    return null;
  }

  public List<String> getConcreteResources() {
    if (concreteResourceNames == null) {
      concreteResourceNames = new ArrayList<>();
      Set<String> names = new HashSet<>();
      for (StructureDefinition sd : allStructures()) {
        if (sd.getKind() == StructureDefinitionKind.RESOURCE && !sd.getAbstract()) {
          names.add(sd.getType());
        }
      }
      concreteResourceNames.addAll(Utilities.sorted(names));
    }
    return concreteResourceNames;
  }

}

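Both getters above share the same lazy-cache pattern: scan allStructures() once, de-duplicate the type names in a set, sort them, and reuse the cached list on later calls. A minimal standalone sketch of that pattern (the class and method below are illustrative, not part of this PR):

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CachedNamesSketch {
  private List<String> concreteResourceNames;

  public List<String> getConcreteResources(List<String> scannedTypes) {
    if (concreteResourceNames == null) {
      concreteResourceNames = new ArrayList<>();
      Set<String> names = new HashSet<>(scannedTypes); // stands in for the allStructures() scan
      List<String> sorted = new ArrayList<>(names);
      Collections.sort(sorted);                        // Utilities.sorted(...) in the real code
      concreteResourceNames.addAll(sorted);
    }
    return concreteResourceNames;                      // cached and already sorted on later calls
  }
}
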
@@ -115,10 +115,13 @@ public class TypeDetails {
    public boolean isSystemType() {
      return uri.startsWith(FP_NS);
    }
  }

  private List<ProfiledType> types = new ArrayList<ProfiledType>();
  private CollectionStatus collectionStatus;
  private Set<String> targets; // or, not and, canonical urls

  public TypeDetails(CollectionStatus collectionStatus, String... names) {
    super();
    this.collectionStatus = collectionStatus;

@@ -268,7 +271,14 @@ public class TypeDetails {
      collectionStatus = source.collectionStatus;
    else
      collectionStatus = CollectionStatus.ORDERED;
    if (source.targets != null) {
      if (targets == null) {
        targets = new HashSet<>();
      }
      targets.addAll(source.targets);
    }
  }

  public TypeDetails union(TypeDetails right) {
    TypeDetails result = new TypeDetails(null);
    if (right.collectionStatus == CollectionStatus.UNORDERED || collectionStatus == CollectionStatus.UNORDERED)

@@ -279,6 +289,16 @@ public class TypeDetails {
      result.addType(pt);
    for (ProfiledType pt : right.types)
      result.addType(pt);
    if (targets != null || right.targets != null) {
      result.targets = new HashSet<>();
      if (targets != null) {
        result.targets.addAll(targets);
      }
      if (right.targets != null) {
        result.targets.addAll(right.targets);
      }
    }
    return result;
  }

@@ -297,6 +317,15 @@ public class TypeDetails {
    }
    for (ProfiledType pt : right.types)
      result.addType(pt);
    if (targets != null && right.targets != null) {
      result.targets = new HashSet<>();
      for (String s : targets) {
        if (right.targets.contains(s)) {
          result.targets.add(s);
        }
      }
    }
    return result;
  }

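The two merge paths above treat the new target set differently: when the type sets are unioned, targets from either operand are kept, while the second routine keeps only the targets both operands share (and drops the constraint entirely when either side has none). A standalone sketch of those rules, using plain java.util sets rather than the real TypeDetails fields:

import java.util.HashSet;
import java.util.Set;

public class TargetSetsSketch {
  // union-style merge: anything either operand could point at stays allowed
  static Set<String> unionTargets(Set<String> a, Set<String> b) {
    if (a == null && b == null) {
      return null;
    }
    Set<String> out = new HashSet<>();
    if (a != null) out.addAll(a);
    if (b != null) out.addAll(b);
    return out;
  }

  // the second merge: only constrain when both sides carry target information
  static Set<String> commonTargets(Set<String> a, Set<String> b) {
    if (a == null || b == null) {
      return null;
    }
    Set<String> out = new HashSet<>(a);
    out.retainAll(b);
    return out;
  }
}
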
@@ -383,4 +412,30 @@ public class TypeDetails {
  }


  public void addTarget(String url) {
    if (targets == null) {
      targets = new HashSet<>();
    }
    targets.add(url);
  }
  public Set<String> getTargets() {
    return targets;
  }
  public boolean typesHaveTargets() {
    for (ProfiledType pt : types) {
      if (Utilities.existsInList(pt.getUri(), "Reference", "CodeableReference", "canonical", "http://hl7.org/fhir/StructureDefinition/Reference", "http://hl7.org/fhir/StructureDefinition/CodeableReference", "http://hl7.org/fhir/StructureDefinition/canonical")) {
        return true;
      }
    }
    return false;
  }
  public void addTargets(Set<String> src) {
    if (src != null) {
      for (String s : src) {
        addTarget(s);
      }
    }
  }

}

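A hedged usage sketch of the new target-tracking API: the constructor and the addTarget/getTargets/typesHaveTargets signatures are taken from the hunks above, but the import paths and the location of the CollectionStatus enum are assumptions about this version of the library.

// Assumed package locations; only the calls shown in the diff above are used.
import org.hl7.fhir.r5.model.TypeDetails;
import org.hl7.fhir.r5.model.ExpressionNode.CollectionStatus;

public class TargetTrackingDemo {
  public static void main(String[] args) {
    TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, "Reference");
    td.addTarget("http://hl7.org/fhir/StructureDefinition/Patient");
    System.out.println(td.typesHaveTargets()); // true - "Reference" is one of the reference-like types
    System.out.println(td.getTargets());       // [http://hl7.org/fhir/StructureDefinition/Patient]
  }
}
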
@@ -223,12 +223,15 @@ public abstract class ResourceRenderer extends DataRenderer {
      tr = resolveReference(rw, r.getReference());

      if (!r.getReference().startsWith("#")) {
        if (tr != null && tr.getReference() != null)
        if (tr != null && tr.getReference() != null) {
          c = x.ah(tr.getReference());
        else
          c = x.ah(r.getReference());
        } else if (r.getReference().contains("?")) {
          x.tx("Conditional Reference: ");
          c = x.code("");
        } else {
          c = x.ah(r.getReference());
        }
      } else {

        c = x.ah(r.getReference());
      }
    } else {

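The renderer above now distinguishes three shapes of reference: contained ("#..."), conditional ("Type?query", rendered as literal text in a code element rather than a link), and ordinary literal references. A standalone sketch of that classification (illustrative names, not the real renderer API):

public class ReferenceKindSketch {
  enum Kind { CONTAINED, CONDITIONAL, LITERAL }

  static Kind classify(String reference) {
    if (reference.startsWith("#")) {
      return Kind.CONTAINED;
    }
    if (reference.contains("?")) {
      return Kind.CONDITIONAL; // e.g. "Patient?identifier=123" - nothing resolvable to link to
    }
    return Kind.LITERAL;
  }

  public static void main(String[] args) {
    System.out.println(classify("#med1"));                  // CONTAINED
    System.out.println(classify("Patient?identifier=123")); // CONDITIONAL
    System.out.println(classify("Patient/123"));            // LITERAL
  }
}
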
@@ -30,6 +30,7 @@ import org.hl7.fhir.r5.context.IWorkerContext.ValidationResult;
import org.hl7.fhir.r5.model.Base;
import org.hl7.fhir.r5.model.BaseDateTimeType;
import org.hl7.fhir.r5.model.BooleanType;
import org.hl7.fhir.r5.model.CanonicalType;
import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.Constants;
import org.hl7.fhir.r5.model.DateTimeType;

@@ -1615,7 +1616,7 @@ public class FHIRPathEngine {
        result.update(executeContextType(context, exp.getName(), exp));
      } else {
        for (String s : focus.getTypes()) {
          result.update(executeType(s, exp, atEntry, elementDependencies));
          result.update(executeType(s, exp, atEntry, focus, elementDependencies));
        }
        if (result.hasNoTypes()) {
          throw makeException(exp, I18nConstants.FHIRPATH_UNKNOWN_NAME, exp.getName(), focus.describe());

@@ -1961,7 +1962,12 @@ public class FHIRPathEngine {
    case LessOrEqual: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
    case GreaterOrEqual: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
    case Is: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
    case As: return new TypeDetails(CollectionStatus.SINGLETON, right.getTypes());
    case As:
      TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, right.getTypes());
      if (td.typesHaveTargets()) {
        td.addTargets(left.getTargets());
      }
      return td;
    case Union: return left.union(right);
    case Or: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);
    case And: return new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_Boolean);

@@ -3146,12 +3152,12 @@ public class FHIRPathEngine {
      return hostServices.resolveConstantType(context.appInfo, name);
    }

  private TypeDetails executeType(String type, ExpressionNode exp, boolean atEntry, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
  private TypeDetails executeType(String type, ExpressionNode exp, boolean atEntry, TypeDetails focus, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
    if (atEntry && Character.isUpperCase(exp.getName().charAt(0)) && hashTail(type).equals(exp.getName())) { // special case for start up
      return new TypeDetails(CollectionStatus.SINGLETON, type);
    }
    TypeDetails result = new TypeDetails(null);
    getChildTypesByName(type, exp.getName(), result, exp, elementDependencies);
    getChildTypesByName(type, exp.getName(), result, exp, focus, elementDependencies);
    return result;
  }

@@ -3217,11 +3223,19 @@ public class FHIRPathEngine {
    }
    case As : {
      checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
      return new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
      TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
      if (td.typesHaveTargets()) {
        td.addTargets(focus.getTargets());
      }
      return td;
    }
    case OfType : {
      checkParamTypes(exp, exp.getFunction().toCode(), paramTypes, new TypeDetails(CollectionStatus.SINGLETON, TypeDetails.FP_String));
      return new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
      TypeDetails td = new TypeDetails(CollectionStatus.SINGLETON, exp.getParameters().get(0).getName());
      if (td.typesHaveTargets()) {
        td.addTargets(focus.getTargets());
      }
      return td;
    }
    case Type : {
      boolean s = false;

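Both the `as` operator (hunk at 1961 above) and the as()/ofType() functions now keep the focus's target profiles when the cast result is still a reference-like type, instead of discarding them. A standalone sketch of that rule (illustrative names, plain sets instead of TypeDetails):

import java.util.HashSet;
import java.util.Set;

public class AsCastTargetsSketch {
  static Set<String> castResultTargets(String castType, Set<String> focusTargets) {
    boolean referenceLike = castType.equals("Reference")
        || castType.equals("CodeableReference")
        || castType.equals("canonical");
    if (referenceLike && focusTargets != null) {
      return new HashSet<>(focusTargets); // e.g. keep StructureDefinition/Patient from the focus
    }
    return null;                          // casting to a non-reference type: no targets to carry
  }
}
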
@@ -3624,7 +3638,7 @@ public class FHIRPathEngine {
  private TypeDetails childTypes(TypeDetails focus, String mask, ExpressionNode expr) throws PathEngineException, DefinitionException {
    TypeDetails result = new TypeDetails(CollectionStatus.UNORDERED);
    for (String f : focus.getTypes()) {
      getChildTypesByName(f, mask, result, expr, null);
      getChildTypesByName(f, mask, result, expr, null, null);
    }
    return result;
  }

@@ -5746,7 +5760,7 @@ public class FHIRPathEngine {

  }

  private void getChildTypesByName(String type, String name, TypeDetails result, ExpressionNode expr, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
  private void getChildTypesByName(String type, String name, TypeDetails result, ExpressionNode expr, TypeDetails focus, Set<ElementDefinition> elementDependencies) throws PathEngineException, DefinitionException {
    if (Utilities.noString(type)) {
      throw makeException(expr, I18nConstants.FHIRPATH_NO_TYPE, "", "getChildTypesByName");
    }

@@ -5757,8 +5771,6 @@ public class FHIRPathEngine {
      return;
    }


    if (type.equals(TypeDetails.FP_SimpleTypeInfo)) {
      getSimpleTypeChildTypesByName(name, result);
    } else if (type.equals(TypeDetails.FP_ClassInfo)) {

@@ -5816,7 +5828,7 @@ public class FHIRPathEngine {
          if (elementDependencies != null) {
            elementDependencies.add(ed);
          }
          getChildTypesByName(result.addType(tn), "**", result, expr, elementDependencies);
          getChildTypesByName(result.addType(tn), "**", result, expr, null, elementDependencies);
        }
      } else {
        for (TypeRefComponent t : ed.getType()) {

@@ -5833,14 +5845,14 @@ public class FHIRPathEngine {
            if (elementDependencies != null) {
              elementDependencies.add(ed);
            }
            getChildTypesByName(result.addType(rn), "**", result, expr, elementDependencies);
            getChildTypesByName(result.addType(rn), "**", result, expr, null, elementDependencies);
          }
        }
      } else if (!result.hasType(worker, tn)) {
        if (elementDependencies != null) {
          elementDependencies.add(ed);
        }
        getChildTypesByName(result.addType(tn), "**", result, expr, elementDependencies);
        getChildTypesByName(result.addType(tn), "**", result, expr, null, elementDependencies);
      }
    }
  }

@@ -5872,6 +5884,7 @@ public class FHIRPathEngine {
          elementDependencies.add(ed);
        }
        result.addType(t.getCode());
        copyTargetProfiles(ed, t, focus, result);
      }
    }
  }

@@ -5919,6 +5932,7 @@ public class FHIRPathEngine {
          elementDependencies.add(ed.definition);
        }
        result.addType(ed.definition.unbounded() ? CollectionStatus.ORDERED : CollectionStatus.SINGLETON, pt);
        copyTargetProfiles(ed.getDefinition(), t, focus, result);
      }
    }
  }

@@ -5928,6 +5942,18 @@ public class FHIRPathEngine {
    }
  }

  private void copyTargetProfiles(ElementDefinition ed, TypeRefComponent t, TypeDetails focus, TypeDetails result) {
    if (t.hasTargetProfile()) {
      for (CanonicalType u : t.getTargetProfile()) {
        result.addTarget(u.primitiveValue());
      }
    } else if (focus != null && focus.hasType("CodeableReference") && ed.getPath().endsWith(".reference") && focus.getTargets() != null) { // special case, targets are on parent
      for (String s : focus.getTargets()) {
        result.addTarget(s);
      }
    }
  }

  private void addTypeAndDescendents(List<StructureDefinition> sdl, StructureDefinition dt, List<StructureDefinition> types) {
    sdl.add(dt);
    for (StructureDefinition sd : types) {

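copyTargetProfiles prefers the element's own targetProfile list, and falls back to the parent's targets for the CodeableReference.reference special case. A standalone sketch of that decision (plain lists stand in for TypeRefComponent and TypeDetails; all names here are illustrative):

import java.util.ArrayList;
import java.util.List;

public class CopyTargetsSketch {
  static List<String> resolveTargets(List<String> declaredTargetProfiles,
                                     boolean focusIsCodeableReference,
                                     String elementPath,
                                     List<String> focusTargets) {
    if (!declaredTargetProfiles.isEmpty()) {
      return new ArrayList<>(declaredTargetProfiles);   // the type declares its own targets
    }
    if (focusIsCodeableReference && elementPath.endsWith(".reference") && focusTargets != null) {
      return new ArrayList<>(focusTargets);             // targets live on the parent CodeableReference
    }
    return new ArrayList<>();                           // nothing known about the targets
  }
}
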
@@ -69,6 +69,125 @@ public class MarkDownProcessor {
    }
  }

  /**
   * Returns true if this is intended to be processed as markdown
   *
   * this is guess, based on textual analysis of the content.
   *
   * Uses of this routine:
   * In general, the main use of this is to decide to escape the string so erroneous markdown processing doesn't munge characters
   * If it's a plain string, and it's being put into something that's markdown, then you should escape the content
   * If it's markdown, but you're not sure whether to process it as markdown
   *
   * The underlying problem is that markdown processing plain strings is problematic because some technical characters might
   * get lost. So it's good to escape them... but if it's meant to be markdown, then it'll get trashed.
   *
   * This method works by looking for character patterns that are unlikely to occur outside markdown - but it's still only unlikely
   *
   * @param content
   * @return
   */
  // todo: dialect dependency?
  public boolean isProbablyMarkdown(String content, boolean mdIfParagrapghs) {
    if (mdIfParagrapghs && content.contains("\n")) {
      return true;
    }
    String[] lines = content.split("\\r?\\n");
    for (String s : lines) {
      if (s.startsWith("* ") || isHeading(s) || s.startsWith("1. ") || s.startsWith(" ")) {
        return true;
      }
      if (s.contains("```") || s.contains("~~~") || s.contains("[[[")) {
        return true;
      }
      if (hasLink(s)) {
        return true;
      }
      if (hasTextSpecial(s, '*') || hasTextSpecial(s, '_') ) {
        return true;
      }
    }
    return false;
  }

  private boolean isHeading(String s) {
    if (s.length() > 7 && s.startsWith("###### ") && !Character.isWhitespace(s.charAt(7))) {
      return true;
    }
    if (s.length() > 6 && s.startsWith("##### ") && !Character.isWhitespace(s.charAt(6))) {
      return true;
    }
    if (s.length() > 5 && s.startsWith("#### ") && !Character.isWhitespace(s.charAt(5))) {
      return true;
    }
    if (s.length() > 4 && s.startsWith("### ") && !Character.isWhitespace(s.charAt(4))) {
      return true;
    }
    if (s.length() > 3 && s.startsWith("## ") && !Character.isWhitespace(s.charAt(3))) {
      return true;
    }
    //
    // not sure about this one. # [string] is something that could easily arise in non-markdown,
    // so this appearing isn't enough to call it markdown
    //
    // if (s.length() > 2 && s.startsWith("# ") && !Character.isWhitespace(s.charAt(2))) {
    //   return true;
    // }
    return false;
  }

  private boolean hasLink(String s) {
    int left = -1;
    int mid = -1;
    for (int i = 0; i < s.length(); i++) {
      char c = s.charAt(i);
      if (c == '[') {
        mid = -1;
        left = i;
      } else if (left > -1 && i < s.length()-1 && c == ']' && s.charAt(i+1) == '(') {
        mid = i;
      } else if (left > -1 && c == ']') {
        left = -1;
      } else if (left > -1 && mid > -1 && c == ')') {
        return true;
      } else if (mid > -1 && c == '[' || c == ']' || (c == '(' && i > mid+1)) {
        left = -1;
        mid = -1;
      }
    }
    return false;
  }

  private boolean hasTextSpecial(String s, char c) {
    boolean second = false;
    for (int i = 0; i < s.length(); i++) {
      char prev = i == 0 ? ' ' : s.charAt(i-1);
      char next = i < s.length() - 1 ? s.charAt(i+1) : ' ';
      if (s.charAt(i) != c) {
        // nothing
      } else if (second) {
        if (Character.isWhitespace(next) && (isPunctation(prev) || Character.isLetterOrDigit(prev))) {
          return true;
        }
        second = false;
      } else {
        if (Character.isWhitespace(prev) && (isPunctation(next) || Character.isLetterOrDigit(next))) {
          second = true;
        }
      }
    }
    return false;
  }

  private boolean isPunctation(char ch) {
    return Utilities.existsInList(ch, '.', ',', '!', '?');
  }

  /**
   * This deals with a painful problem created by the intersection of previous publishing processes
   * and the way commonmark specifies that < is handled in content. For control reasons, the FHIR specification does

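A usage sketch of the new heuristic, based on the unit tests later in this diff (the constructor and Dialect import are taken from those tests; the expected outputs follow the test assertions):

import org.hl7.fhir.utilities.MarkDownProcessor;
import org.hl7.fhir.utilities.MarkDownProcessor.Dialect;

public class MarkdownGuessDemo {
  public static void main(String[] args) {
    MarkDownProcessor md = new MarkDownProcessor(Dialect.COMMON_MARK);
    // plain prose with no markdown patterns: treat as a plain string (and escape before embedding)
    System.out.println(md.isProbablyMarkdown("this is a test string", false));    // false
    // emphasis spanning whole words is one of the patterns the heuristic accepts
    System.out.println(md.isProbablyMarkdown("this is a *test* string", false));  // true
    // with mdIfParagrapghs=true, the presence of a line break alone counts as markdown
    System.out.println(md.isProbablyMarkdown("this is a \r\ntest string", true)); // true
  }
}
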
@@ -774,6 +774,7 @@ public class I18nConstants {
  public static final String EXT_VER_URL_REVERSION = "EXT_VER_URL_REVERSION";
  public static final String ILLEGAL_COMMENT_TYPE = "ILLEGAL_COMMENT_TYPE";
  public static final String SD_NO_SLICING_ON_ROOT = "SD_NO_SLICING_ON_ROOT";
  public static final String REFERENCE_REF_QUERY_INVALID = "REFERENCE_REF_QUERY_INVALID";
}

@@ -150,7 +150,9 @@ public abstract class XhtmlFluent {

  public XhtmlNode code(String text) {
    return addTag("code").tx(text);
    XhtmlNode code = addTag("code");
    code.tx(text);
    return code;
  }

  public XhtmlNode code() {

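The point of the change above is the return value: callers like the ResourceRenderer hunk earlier (c = x.code("")) need the code element itself back so they can keep working with it, whereas the old one-liner returned whatever tx(...) returned. A standalone sketch of the difference with a toy builder (not the real XhtmlNode API):

import java.util.ArrayList;
import java.util.List;

public class FluentReturnSketch {
  static class Node {
    final String name;
    final List<Node> children = new ArrayList<>();
    Node(String name) { this.name = name; }
    Node addTag(String tag) { Node n = new Node(tag); children.add(n); return n; }
    Node tx(String text)    { Node n = new Node("#text:" + text); children.add(n); return n; } // returns the text node

    // fixed shape: hand the "code" element back, not the text node created inside it
    Node code(String text)  { Node code = addTag("code"); code.tx(text); return code; }
  }

  public static void main(String[] args) {
    Node x = new Node("div");
    Node c = x.code("Patient?identifier=123");
    System.out.println(c.name); // "code" - the caller can keep adding content to the element
  }
}
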
@@ -824,5 +824,6 @@ EXT_VER_URL_NOT_ALLOWED = The extension URL must not contain a version
EXT_VER_URL_REVERSION = The extension URL must not contain a version. The extension was validated against version {0} of the extension
ILLEGAL_COMMENT_TYPE = The fhir_comments property must be an array of strings
SD_NO_SLICING_ON_ROOT = Slicing is not allowed at the root of a profile
REFERENCE_REF_QUERY_INVALID = The query part of the conditional reference is not a valid query string ({0})

@@ -2,15 +2,72 @@ package org.hl7.fhir.utilities;

import static org.junit.jupiter.api.Assertions.*;

import org.hl7.fhir.utilities.MarkDownProcessor.Dialect;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class MarkdownTests {

  @Test
  void testMarkdownDetection() {
    testMarkdown("this is a test string", false);
    testMarkdown("this is a \r\ntest string", false);
    testMarkdown("this is a \r\ntest string", true, true);
    testMarkdown("this is a t*est strin*g", false);
    testMarkdown("this is a *test strin*g", false);
    testMarkdown("this is a *test string*", true);
    testMarkdown("this is a *test *string", false);
    testMarkdown("this is a *test* string", true);
    testMarkdown("this [is] a test string", false);
    testMarkdown("this [is](link) a test string", true);
    testMarkdown("this [is](link a test string", false);
    testMarkdown("this [is] (link) a test string", false);
    testMarkdown("this [is(link)] a test string", false);
    testMarkdown("this [is](link a test string", false);
    testMarkdown("this [i]s] (link) a test string", false);
    testMarkdown("## heading", true);
    testMarkdown("# heading", false);
    testMarkdown("## heading", false);
    testMarkdown("###", false);
  }

  private void testMarkdown(String content, boolean isMD) {
    testMarkdown(content, isMD, false);
  }

  private void testMarkdown(String content, boolean isMD, boolean ifLines) {
    boolean test = new MarkDownProcessor(Dialect.COMMON_MARK).isProbablyMarkdown(content, ifLines);
    assertEquals(isMD, test);
  }

  @Test
  void testStringToMarkdown() {
    // first, we test the need for replacing
    Assertions.assertEquals("<p>This is a string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is a string", null).trim());
    Assertions.assertEquals("<p>This is *a string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a string", null).trim());
    Assertions.assertNotEquals("<p>This is *a* string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a* string", null).trim());
    Assertions.assertEquals("<p>This is *a *string</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This is *a *string", null).trim());

    Assertions.assertNotEquals("<p>This genomic study analyzes CYP2D6*1 and CYP2D6*2</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This genomic study analyzes CYP2D6*1 and CYP2D6*2", null).trim());
    Assertions.assertEquals("<p>This genomic study analyzes CYP2D6*1 and CYP2D6*2</p>", new MarkDownProcessor(Dialect.COMMON_MARK).process("This genomic study analyzes CYP2D6*1 and CYP2D6\\*2", null).trim());

    Assertions.assertEquals("This is \\*a test\\*", MarkDownProcessor.makeStringSafeAsMarkdown("This is *a test*"));
    Assertions.assertEquals("This is *a test*", MarkDownProcessor.makeMarkdownForString("This is \\*a test\\*"));
  }

}
//
//case '*':
//case '&':
//case '#':
//case '[':
//case '>':
//case '<':
//case '`':
// -
// |
// :
// ~
// ^
// =

@@ -1065,7 +1065,7 @@ public class BaseValidator implements IValidationContextResourceLoader {
    if (!context.getResourceNames().contains(tn)) {
      return false;
    } else {
      return q.matches("([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+))*");
      return q.matches("([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*");
    }
  }

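The only change above is `[^=&]+` becoming `[^=&]*`, so query parameters with empty values are now accepted. A small standalone check (the query string is a made-up example):

public class ConditionalQueryRegexDemo {
  // old pattern required at least one character after each '='; the new one allows empty values
  static final String OLD = "([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]+))*";
  static final String NEW = "([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*";

  public static void main(String[] args) {
    String q = "identifier=12345&status=";  // hypothetical query with an empty value
    System.out.println(q.matches(OLD));     // false - empty 'status' value rejected
    System.out.println(q.matches(NEW));     // true  - empty values are tolerated
  }
}
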
@@ -3262,6 +3262,7 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
        refType = "bundled";
      }
    }
    boolean conditional = ref.contains("?") && Utilities.existsInList(ref.substring(0, ref.indexOf("?")), context.getResourceNames());
    ReferenceValidationPolicy pol;
    if (refType.equals("contained") || refType.equals("bundled")) {
      pol = ReferenceValidationPolicy.CHECK_VALID;

@@ -3273,7 +3274,13 @@ public class InstanceValidator extends BaseValidator implements IResourceValidat
      }
    }

    if (pol.checkExists()) {
    if (conditional) {
      String query = ref.substring(ref.indexOf("?"));
      boolean test = !Utilities.noString(query) && query.matches("\\?([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*");
      //("^\\?([\\w-]+(=[\\w-]*)?(&[\\w-]+(=[\\w-]*)?)*)?$"),
      ok = rule(errors, "2023-02-20", IssueType.INVALID, element.line(), element.col(), path, test, I18nConstants.REFERENCE_REF_QUERY_INVALID, ref) && ok;

    } else if (pol.checkExists()) {
      if (we == null) {
        if (!refType.equals("contained")) {
          if (fetcher == null) {

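The validator now cuts the query off the conditional reference and checks it against the same parameter grammar, reporting REFERENCE_REF_QUERY_INVALID when it does not match. A standalone sketch of that check (the helper name and sample references are illustrative):

public class ConditionalReferenceCheck {
  static boolean hasValidConditionalQuery(String ref) {
    if (!ref.contains("?")) {
      return false;                      // not a conditional reference at all
    }
    String query = ref.substring(ref.indexOf("?"));
    return !query.isEmpty()
        && query.matches("\\?([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*)(&([_a-zA-Z][_a-zA-Z0-9]*=[^=&]*))*");
  }

  public static void main(String[] args) {
    System.out.println(hasValidConditionalQuery("Patient?identifier=http://acme.org/mrn|12345")); // true
    System.out.println(hasValidConditionalQuery("Patient?=nothing"));                             // false
  }
}
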
@@ -79,3 +79,13 @@ v: {
  "system" : "http://unitsofmeasure.org"
}
-------------------------------------------------------------------------------------
{"code" : {
  "system" : "http://unitsofmeasure.org",
  "code" : "cm"
}, "valueSet" :null, "lang":"null", "useServer":"true", "useClient":"true", "guessSystem":"false", "valueSetMode":"ALL_CHECKS", "versionFlexible":"false"}####
v: {
  "display" : "cm",
  "code" : "cm",
  "system" : "http://unitsofmeasure.org"
}
-------------------------------------------------------------------------------------