diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 9ec40eb7254..5edacfafad4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -210,7 +210,7 @@ public abstract class BaseHapiFhirDao implements IDao {
} catch (DataFormatException e) {
throw new InvalidRequestException("Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
}
-
+
Class extends IBaseResource> type = resourceDefinition.getImplementingClass();
String id = nextValue.getReference().getIdPart();
if (StringUtils.isBlank(id)) {
@@ -271,6 +271,7 @@ public abstract class BaseHapiFhirDao implements IDao {
protected List extractSearchParamUri(ResourceTable theEntity, IResource theResource) {
return mySearchParamExtractor.extractSearchParamUri(theEntity, theResource);
}
+
protected List extractSearchParamCoords(ResourceTable theEntity, IResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@@ -591,7 +592,7 @@ public abstract class BaseHapiFhirDao implements IDao {
}
Set allDefs = new HashSet();
-
+
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
if (tagList != null) {
for (Tag next : tagList) {
@@ -610,8 +611,8 @@ public abstract class BaseHapiFhirDao implements IDao {
theEntity.addTag(tag);
theEntity.setHasTags(true);
}
- }
-
+ }
+
List profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
if (profiles != null) {
for (IIdType next : profiles) {
@@ -621,18 +622,18 @@ public abstract class BaseHapiFhirDao implements IDao {
theEntity.setHasTags(true);
}
}
-
-
+
for (ResourceTag next : new ArrayList(theEntity.getTags())) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
- theEntity.getTags().remove(next);
+ if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
+ theEntity.getTags().remove(next);
+ }
}
}
if (theEntity.getTags().size() == 0) {
theEntity.setHasTags(false);
}
-
String title = ResourceMetadataKeyEnum.TITLE.get(theResource);
if (title != null && title.length() > BaseHasResource.MAX_TITLE_LENGTH) {
@@ -642,6 +643,29 @@ public abstract class BaseHapiFhirDao implements IDao {
}
+ /**
+ * This method is called when an update to an existing resource detects that the resource supplied for update is
+ * missing a tag/profile/security label that the currently persisted resource holds.
+ *
+ * The default implementation removes any profile declarations, but leaves tags and security labels in place.
+ * Subclasses may choose to override and change this behaviour.
+ *
+ *
+ * @param theEntity
+ * The entity being updated (Do not modify the entity! Undefined behaviour will occur!)
+ * @param theTag
+ * The tag
+ * @return Returns true if the tag should be removed
+ * @see Updates to Tags, Profiles, and Security
+ * Labels for a description of the logic that the default behaviour follows.
+ */
+ protected boolean shouldDroppedTagBeRemovedOnUpdate(ResourceTable theEntity, ResourceTag theTag) {
+ if (theTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
+ return true;
+ }
+ return false;
+ }
+
protected Set processMatchUrl(String theMatchUrl, Class theResourceType) {
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theResourceType);
@@ -1260,17 +1284,15 @@ public abstract class BaseHapiFhirDao implements IDao {
}
/**
- * This method is invoked immediately before storing a new resource, or an
- * update to an existing resource to allow the DAO to ensure that it is valid
- * for persistence. By default, checks for the "subsetted" tag and rejects
- * resources which have it. Subclasses should call the superclass implementation to
- * preserve this check.
+ * This method is invoked immediately before storing a new resource, or an update to an existing resource to allow
+ * the DAO to ensure that it is valid for persistence. By default, checks for the "subsetted" tag and rejects
+ * resources which have it. Subclasses should call the superclass implementation to preserve this check.
*
* @param theResource
* The resource that is about to be persisted
*/
protected void validateResourceForStorage(T theResource) {
- IResource res = (IResource)theResource;
+ IResource res = (IResource) theResource;
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(res);
if (tagList != null) {
Tag tag = tagList.getTag(Constants.TAG_SUBSETTED_SYSTEM, Constants.TAG_SUBSETTED_CODE);
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoDstu2UpdateTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoDstu2UpdateTest.java
index 230380c6821..22dce227967 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoDstu2UpdateTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoDstu2UpdateTest.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.not;
@@ -17,9 +18,8 @@ import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Date;
-import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -27,10 +27,12 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
-import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
+import ca.uhn.fhir.model.base.composite.BaseCodingDt;
+import ca.uhn.fhir.model.dstu2.composite.CodingDt;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
@@ -166,6 +168,62 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
myPatientDao.update(p);
}
+ /**
+ * Per the spec, update should preserve tags and security labels but not profiles
+ */
+ @Test
+ public void testUpdateMaintainsTagsAndSecurityLabels() throws InterruptedException {
+ String methodName = "testUpdateMaintainsTagsAndSecurityLabels";
+
+ IIdType p1id;
+ {
+ Patient p1 = new Patient();
+ p1.addName().addFamily(methodName);
+
+ TagList tagList = new TagList();
+ tagList.addTag("tag_scheme1", "tag_term1");
+ ResourceMetadataKeyEnum.TAG_LIST.put(p1, tagList);
+ List secList = new ArrayList();
+ secList.add(new CodingDt("sec_scheme1", "sec_term1"));
+ ResourceMetadataKeyEnum.SECURITY_LABELS.put(p1, secList);
+ List profileList = new ArrayList();
+ profileList.add(new IdDt("http://foo1"));
+ ResourceMetadataKeyEnum.PROFILES.put(p1, profileList);
+
+ p1id = myPatientDao.create(p1).getId().toUnqualifiedVersionless();
+ }
+ {
+ Patient p1 = new Patient();
+ p1.setId(p1id);
+ p1.addName().addFamily(methodName);
+
+ TagList tagList = new TagList();
+ tagList.addTag("tag_scheme2", "tag_term2");
+ ResourceMetadataKeyEnum.TAG_LIST.put(p1, tagList);
+ List secList = new ArrayList();
+ secList.add(new CodingDt("sec_scheme2", "sec_term2"));
+ ResourceMetadataKeyEnum.SECURITY_LABELS.put(p1, secList);
+ List profileList = new ArrayList();
+ profileList.add(new IdDt("http://foo2"));
+ ResourceMetadataKeyEnum.PROFILES.put(p1, profileList);
+
+ myPatientDao.update(p1);
+ }
+ {
+ Patient p1 = myPatientDao.read(p1id);
+ TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(p1);
+ assertThat(tagList, containsInAnyOrder(new Tag("tag_scheme1", "tag_term1"), new Tag("tag_scheme2", "tag_term2")));
+ List secList = ResourceMetadataKeyEnum.SECURITY_LABELS.get(p1);
+ Set secListValues = new HashSet();
+ for (BaseCodingDt next : secList) {
+ secListValues.add(next.getSystemElement().getValue() + "|" + next.getCodeElement().getValue());
+ }
+ assertThat(secListValues, containsInAnyOrder("sec_scheme1|sec_term1", "sec_scheme2|sec_term2"));
+ List profileList = ResourceMetadataKeyEnum.PROFILES.get(p1);
+ assertThat(profileList, contains(new IdDt("http://foo2"))); // no foo1
+ }
+ }
+
@Test
public void testUpdateMaintainsSearchParams() throws InterruptedException {
Patient p1 = new Patient();
@@ -236,13 +294,13 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
{
Patient patient = new Patient();
patient.addName().addFamily(name);
-
+
List tl = new ArrayList();
tl.add(new IdDt("http://foo/bar"));
tl.add(new IdDt("http://foo/bar"));
tl.add(new IdDt("http://foo/bar"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
-
+
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
@@ -255,7 +313,7 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
}
}
-
+
@Test
public void testUpdateModifiesProfiles() {
String name = "testUpdateModifiesProfiles";
@@ -263,11 +321,11 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
{
Patient patient = new Patient();
patient.addName().addFamily(name);
-
+
List tl = new ArrayList();
tl.add(new IdDt("http://foo/bar"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
-
+
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
@@ -284,11 +342,11 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
Patient patient = new Patient();
patient.setId(id);
patient.addName().addFamily(name);
-
+
List tl = new ArrayList();
tl.add(new IdDt("http://foo/baz"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
-
+
id = myPatientDao.update(patient).getId().toUnqualifiedVersionless();
}
@@ -300,11 +358,8 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
assertEquals("http://foo/baz", tl.get(0).getValue());
}
-
}
-
-
@Test
public void testUpdateUnknownNumericIdFails() {
Patient p = new Patient();
diff --git a/src/site/xdoc/doc_validation.xml b/src/site/xdoc/doc_validation.xml
index b32e12539b3..1cbae15bd46 100644
--- a/src/site/xdoc/doc_validation.xml
+++ b/src/site/xdoc/doc_validation.xml
@@ -40,17 +40,18 @@
There are two implementations of IParserErrorHandler
worth
- mentioning.
+ mentioning. You can also supply your own implementation if you want.
-
-
+ logs any errors but does not abort parsing. By default this handler is used, and it
+ logs errors at "warning" level. It can also be configured to silently ignore issues.
+
+ -
StrictErrorHandler
throws a
DataFormatException
if any errors are detected.
-
+
@@ -82,37 +83,39 @@
-
+
HAPI provides a built-in and configurable mechanism for validating resources.
- This mechanism is called the FHIR Validator.
+ This mechanism is called the Resource Validator.
+
+
+
+ FHIR resource definitions are distributed with a set of XML schema files (XSD)
+ as well as a set of XML Schematron (SCH) files. These two sets of files are
+ complementary to each other, meaning that in order to claim compliance to the
+ FHIR specification, your resources must validate against both sets.
+
+
+ The two sets of files are included with HAPI, and it uses them to perform
+ validation.
-
-
-
- FHIR resource definitions are distributed with a set of XML schema files (XSD)
- as well as a set of XML Schematron (SCH) files. These two sets of files are
- complimentary to each other, meaning that in order to claim compliance to the
- FHIR specification, your resources must validate against both sets.
-
-
- The two sets of files are included with HAPI, and it uses them to perform
- validation.
-
-
-
-
In order to use HAPI's Schematron support, a libaray called
Phloc-Schematron
- is used, so this library must be added to your classpath (or Maven pom file).
- See Downloads for more information.
+ is used, so this library must be added to your classpath (or Maven POM file, Gradle
+ file, etc.)
+ Note that this library is specified as an optional dependency by HAPI FHIR
+ so you need to explicitly include it if you want to use this
+ functionality.
+
+
+ See Downloads for more information on how
+ to add it.
-
@@ -142,6 +145,46 @@
+
+
+
+
+
+
+ As of HAPI FHIR 1.2, HAPI supports validation against StructureDefinition
+ resources. This functionality uses the HL7 "InstanceValidator", which is able
+ to check a resource for conformance to a given profile (StructureDefinition),
+ including validating codes for conformance to their given ValueSets.
+
+
+ StructureDefinition validation can be used to validate a resource against the
+ official structure definitions (produced by HL7) as well as against custom
+ definitions provided either by HL7 or by the user.
+
+
+
+
+
+ To use this functionality, you must add the following two dependencies
+ to your classpath (or Maven POM file, Gradle file, etc.):
+
+
+ -
+ hapi-fhir-structures-hl7org-dstu2: This file contains the "reference implementation"
+ structures and tooling. You need to include it even if you are not using the RI model
+ (the StructureDefinition validation will work against HAPI structures as well)
+
+ -
+ hapi-fhir-validation-resources: This file contains the official FHIR
+ StructureDefinition files, and the ValueSets needed to support them.
+
+
+
+ See the download page for more information.
+
+
+
+