Correct tag handling for DSTU2

This commit is contained in:
jamesagnew 2015-09-02 07:46:52 -04:00
parent 517222d183
commit 875ad3b5f9
4 changed files with 215 additions and 67 deletions

View File

@ -210,7 +210,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} catch (DataFormatException e) {
throw new InvalidRequestException("Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
}
Class<? extends IBaseResource> type = resourceDefinition.getImplementingClass();
String id = nextValue.getReference().getIdPart();
if (StringUtils.isBlank(id)) {
@ -271,6 +271,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
/**
 * Builds the URI-type search parameter index rows for the given resource.
 * Pure delegation to the configured {@code mySearchParamExtractor}; no logic here.
 *
 * @param theEntity
 *           The persistence entity the index rows will be associated with
 * @param theResource
 *           The resource to extract URI search parameters from
 * @return The extracted index entries (contents depend on the extractor implementation)
 */
protected List<ResourceIndexedSearchParamUri> extractSearchParamUri(ResourceTable theEntity, IResource theResource) {
return mySearchParamExtractor.extractSearchParamUri(theEntity, theResource);
}
/**
 * Builds the coordinate (token/lat-lon style) search parameter index rows for the
 * given resource. Pure delegation to the configured {@code mySearchParamExtractor}.
 *
 * @param theEntity
 *           The persistence entity the index rows will be associated with
 * @param theResource
 *           The resource to extract coordinate search parameters from
 * @return The extracted index entries (contents depend on the extractor implementation)
 */
protected List<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@ -591,7 +592,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
Set<TagDefinition> allDefs = new HashSet<TagDefinition>();
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
if (tagList != null) {
for (Tag next : tagList) {
@ -610,8 +611,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.addTag(tag);
theEntity.setHasTags(true);
}
}
}
List<IdDt> profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
if (profiles != null) {
for (IIdType next : profiles) {
@ -621,18 +622,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setHasTags(true);
}
}
for (ResourceTag next : new ArrayList<ResourceTag>(theEntity.getTags())) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
theEntity.getTags().remove(next);
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
theEntity.getTags().remove(next);
}
}
}
if (theEntity.getTags().size() == 0) {
theEntity.setHasTags(false);
}
String title = ResourceMetadataKeyEnum.TITLE.get(theResource);
if (title != null && title.length() > BaseHasResource.MAX_TITLE_LENGTH) {
@ -642,6 +643,29 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
 * This method is called when an update to an existing resource detects that the resource supplied for update is
 * missing a tag/profile/security label that the currently persisted resource holds.
 * <p>
 * The default implementation removes any profile declarations, but leaves tags and security labels in place.
 * Subclasses may choose to override and change this behaviour.
 * </p>
 *
 * @param theEntity
 *           The entity being updated (Do not modify the entity! Undefined behaviour will occur!)
 * @param theTag
 *           The tag
 * @return Returns <code>true</code> if the tag should be removed
 * @see <a href="http://hl7.org/fhir/2015Sep/resource.html#1.11.3.7">Updates to Tags, Profiles, and Security
 *      Labels</a> for a description of the logic that the default behaviour follows.
 */
protected boolean shouldDroppedTagBeRemovedOnUpdate(ResourceTable theEntity, ResourceTag theTag) {
	// Only PROFILE entries are dropped on update; TAG and SECURITY_LABEL entries are preserved,
	// per the FHIR DSTU2 update semantics referenced above.
	return theTag.getTag().getTagType() == TagTypeEnum.PROFILE;
}
protected <R extends IResource> Set<Long> processMatchUrl(String theMatchUrl, Class<R> theResourceType) {
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theResourceType);
@ -1260,17 +1284,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
* This method is invoked immediately before storing a new resource, or an
* update to an existing resource to allow the DAO to ensure that it is valid
* for persistence. By default, checks for the "subsetted" tag and rejects
* resources which have it. Subclasses should call the superclass implementation to
* preserve this check.
* This method is invoked immediately before storing a new resource, or an update to an existing resource to allow
* the DAO to ensure that it is valid for persistence. By default, checks for the "subsetted" tag and rejects
* resources which have it. Subclasses should call the superclass implementation to preserve this check.
*
* @param theResource
* The resource that is about to be persisted
*/
protected void validateResourceForStorage(T theResource) {
IResource res = (IResource)theResource;
IResource res = (IResource) theResource;
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(res);
if (tagList != null) {
Tag tag = tagList.getTag(Constants.TAG_SUBSETTED_SYSTEM, Constants.TAG_SUBSETTED_CODE);

View File

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.not;
@ -17,9 +18,8 @@ import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -27,10 +27,12 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.dstu2.composite.CodingDt;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
@ -166,6 +168,62 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
myPatientDao.update(p);
}
/**
 * Per the spec, update should preserve tags and security labels but not profiles.
 * See "Updates to Tags, Profiles, and Security Labels" in the DSTU2 spec
 * (resource.html#1.11.3.7) for the rule being verified here.
 */
@Test
public void testUpdateMaintainsTagsAndSecurityLabels() throws InterruptedException {
String methodName = "testUpdateMaintainsTagsAndSecurityLabels";
IIdType p1id;
// Phase 1: create the resource carrying tag1, security label 1, and profile http://foo1
{
Patient p1 = new Patient();
p1.addName().addFamily(methodName);
TagList tagList = new TagList();
tagList.addTag("tag_scheme1", "tag_term1");
ResourceMetadataKeyEnum.TAG_LIST.put(p1, tagList);
List<BaseCodingDt> secList = new ArrayList<BaseCodingDt>();
secList.add(new CodingDt("sec_scheme1", "sec_term1"));
ResourceMetadataKeyEnum.SECURITY_LABELS.put(p1, secList);
List<IdDt> profileList = new ArrayList<IdDt>();
profileList.add(new IdDt("http://foo1"));
ResourceMetadataKeyEnum.PROFILES.put(p1, profileList);
p1id = myPatientDao.create(p1).getId().toUnqualifiedVersionless();
}
// Phase 2: update the same resource with tag2, security label 2, and profile http://foo2
// (note: the update does NOT repeat the phase-1 tag/label/profile)
{
Patient p1 = new Patient();
p1.setId(p1id);
p1.addName().addFamily(methodName);
TagList tagList = new TagList();
tagList.addTag("tag_scheme2", "tag_term2");
ResourceMetadataKeyEnum.TAG_LIST.put(p1, tagList);
List<BaseCodingDt> secList = new ArrayList<BaseCodingDt>();
secList.add(new CodingDt("sec_scheme2", "sec_term2"));
ResourceMetadataKeyEnum.SECURITY_LABELS.put(p1, secList);
List<IdDt> profileList = new ArrayList<IdDt>();
profileList.add(new IdDt("http://foo2"));
ResourceMetadataKeyEnum.PROFILES.put(p1, profileList);
myPatientDao.update(p1);
}
// Phase 3: read back and verify — tags and security labels from BOTH versions must be
// present (accumulated), while only the profile from the update must remain
{
Patient p1 = myPatientDao.read(p1id);
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(p1);
assertThat(tagList, containsInAnyOrder(new Tag("tag_scheme1", "tag_term1"), new Tag("tag_scheme2", "tag_term2")));
List<BaseCodingDt> secList = ResourceMetadataKeyEnum.SECURITY_LABELS.get(p1);
// Flatten codings to "system|code" strings for an order-independent comparison
Set<String> secListValues = new HashSet<String>();
for (BaseCodingDt next : secList) {
secListValues.add(next.getSystemElement().getValue() + "|" + next.getCodeElement().getValue());
}
assertThat(secListValues, containsInAnyOrder("sec_scheme1|sec_term1", "sec_scheme2|sec_term2"));
List<IdDt> profileList = ResourceMetadataKeyEnum.PROFILES.get(p1);
assertThat(profileList, contains(new IdDt("http://foo2"))); // no foo1
}
}
@Test
public void testUpdateMaintainsSearchParams() throws InterruptedException {
Patient p1 = new Patient();
@ -236,13 +294,13 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
{
Patient patient = new Patient();
patient.addName().addFamily(name);
List<IdDt> tl = new ArrayList<IdDt>();
tl.add(new IdDt("http://foo/bar"));
tl.add(new IdDt("http://foo/bar"));
tl.add(new IdDt("http://foo/bar"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
@ -255,7 +313,7 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
}
}
@Test
public void testUpdateModifiesProfiles() {
String name = "testUpdateModifiesProfiles";
@ -263,11 +321,11 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
{
Patient patient = new Patient();
patient.addName().addFamily(name);
List<IdDt> tl = new ArrayList<IdDt>();
tl.add(new IdDt("http://foo/bar"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
@ -284,11 +342,11 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
Patient patient = new Patient();
patient.setId(id);
patient.addName().addFamily(name);
List<IdDt> tl = new ArrayList<IdDt>();
tl.add(new IdDt("http://foo/baz"));
ResourceMetadataKeyEnum.PROFILES.put(patient, tl);
id = myPatientDao.update(patient).getId().toUnqualifiedVersionless();
}
@ -300,11 +358,8 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
assertEquals("http://foo/baz", tl.get(0).getValue());
}
}
@Test
public void testUpdateUnknownNumericIdFails() {
Patient p = new Patient();

View File

@ -40,17 +40,18 @@
</p>
<p>
There are two implementations of <code>IParserErrorHandler</code> worth
mentioning.
mentioning. You can also supply your own implementation if you want.
</p>
<ul>
<ul>
<li>
<a href="./apidocs/ca/uhn/fhir/parser/LenientErrorHandler.html">LenientErrorHandler</a>
logs any errors but does not abort parsing. This is the default.
</ul>
<ul>
logs any errors but does not abort parsing. By default this handler is used, and it
logs errors at "warning" level. It can also be configured to silently ignore issues.
</li>
<li>
<a href="./apidocs/ca/uhn/fhir/parser/StrictErrorHandler.html">StrictErrorHandler</a>
throws a <code>DataFormatException</code> if any errors are detected.
</ul>
</li>
</ul>
<p>
@ -82,37 +83,39 @@
<!-- RESOURCE VALIDATION -->
<section name="Resource Validation">
<section name="Resource Validation (Schema/Schematron)">
<p>
HAPI provides a built-in and configurable mechanism for validating resources.
This mechanism is called the <i>FHIR Validator</i>.
This mechanism is called the <i>Resource Validator</i>.
</p>
<p>
FHIR resource definitions are distributed with a set of XML schema files (XSD)
as well as a set of XML Schematron (SCH) files. These two sets of files are
complementary to each other, meaning that in order to claim compliance with the
FHIR specification, your resources must validate against both sets.
</p>
<p>
The two sets of files are included with HAPI, and it uses them to perform
validation.
</p>
<subsection name="Background">
<p>
FHIR resource definitions are distributed with a set of XML schema files (XSD)
as well as a set of XML Schematron (SCH) files. These two sets of files are
complementary to each other, meaning that in order to claim compliance with the
FHIR specification, your resources must validate against both sets.
</p>
<p>
The two sets of files are included with HAPI, and it uses them to perform
validation.
</p>
</subsection>
<subsection name="Preparation">
<p>
In order to use HAPI's Schematron support, a library called
<a href="https://code.google.com/p/phloc-schematron/">Phloc-Schematron</a>
is used, so this library must be added to your classpath (or Maven pom file).
See <a href="./download.html">Downloads</a> for more information.
is used, so this library must be added to your classpath (or Maven POM file, Gradle
file, etc.).
Note that this library is specified as an optional dependency by HAPI FHIR
so you need to explicitly include it if you want to use this
functionality.
</p>
<p>
See <a href="./download.html">Downloads</a> for more information on how
to add it.
</p>
</subsection>
<subsection name="Validating a Resource">
@ -142,6 +145,46 @@
</subsection>
<a name="structure_definition_validation"/>
</section>
<section name="Resource Validation (StructureDefinition / ValueSet)">
<p>
As of HAPI FHIR 1.2, HAPI supports validation against StructureDefinition
resources. This functionality uses the HL7 "InstanceValidator", which is able
to check a resource for conformance to a given profile (StructureDefinition),
including validating codes for conformance to their given ValueSets.
</p>
<p>
StructureDefinition validation can be used to validate a resource against the
official structure definitions (produced by HL7) as well as against custom
definitions provided either by HL7 or by the user.
</p>
<subsection name="Preparation">
<p>
To use this functionality, you must add the following two dependencies
to your classpath (or Maven POM file, Gradle file, etc.):
</p>
<ul>
<li>
<b>hapi-fhir-structures-hl7org-dstu2</b>: This file contains the "reference implementation"
structures and tooling. You need to include it even if you are not using the RI model
(the StructureDefinition validation will work against HAPI structures as well)
</li>
<li>
<b>hapi-fhir-validation-resources</b>: This file contains the official FHIR
StructureDefinition files, and the ValueSets needed to support them.
</li>
</ul>
<p>
See the <a href="./download.html">download page</a> for more information.
</p>
</subsection>
</section>
</body>

View File

@ -12,26 +12,38 @@
<table>
<thead>
<tr>
<td>HAPI Structure Library</td>
<td>HAPI Version</td>
<td>FHIR Specification Version</td>
<td><b>HAPI Structure Library</b></td>
<td><b>HAPI Version</b></td>
<td><b>FHIR Specification Version</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>hapi-fhir-structures-dstu</td>
<td rowspan="2">hapi-fhir-structures-dstu</td>
<td>1.1</td>
<td><a href="http://www.hl7.org/fhir/DSTU1/index.html">DSTU1 0.0.82</a></td>
</tr>
<tr>
<td>hapi-fhir-structures-dstu2</td>
<td>1.2</td>
<td><a href="http://www.hl7.org/fhir/DSTU1/index.html">DSTU1 0.0.82</a></td>
</tr>
<tr>
<td rowspan="2">hapi-fhir-structures-dstu2</td>
<td>1.1</td>
<td><a href="http://www.hl7.org/fhir/2015May/index.html">DSTU2 0.5.0</a></td>
</tr>
<tr>
<td>hapi-fhir-structures-hl7org-dstu2</td>
<td>1.2</td>
<td><a href="http://hl7.org/fhir/2015Sep/index.html">DSTU2 1.0.0</a></td>
</tr>
<tr>
<td rowspan="2">hapi-fhir-structures-hl7org-dstu2</td>
<td>1.1</td>
<td><a href="http://hl7-fhir.github.io/">DSTU2 Latest Dev</a> (SVN 5843)</td>
<td><a href="http://hl7-fhir.github.io/">DSTU2 0.5.0 Snapshot</a> (SVN 5843)</td>
</tr>
<tr>
<td>1.2</td>
<td><a href="http://hl7.org/fhir/2015Sep/index.html">DSTU2 1.0.0</a></td>
</tr>
</tbody>
</table>
@ -62,7 +74,7 @@
</p>
<p>
See <a href="#Using_Snapshot_Builds">using snapshot builds</a> below to find out
how to get these builds.S
how to get these builds.
</p>
</subsection>
@ -78,11 +90,10 @@
<subsection name="DSTU1 Resources - HAPI Structures">
<p>
At this time, the only official release of FHIR is the "DSTU1" release,
which is available by including the <code>hapi-fhir-structures-dstu-[version].jar</code>.
Note that the ballot process for the next release (DSTU2) has not yet been
completed, but many users are already targeting it for new development as it
is likely to be approved without significant changes.
HAPI supports multiple versions of the FHIR specification. Code which
needs to interact with or implement the DSTU1 specification (which is
now gradually beginning to be replaced by newer specifications) can do
so using <code>hapi-fhir-structures-dstu-[version].jar</code>.
</p>
<p>
If you use Maven, you can include these JARs with the following dependency tags
@ -137,6 +148,17 @@
<version>${hapi_stable_version}</version>
</dependency>]]></source>
<p>
If you want to use HAPI's
<a href="./doc_validation.html#structure_definition_validation">StructureDefinition validation</a>
you will also need to include the <code>hapi-fhir-validation-resources-[version].jar</code>:
</p>
<source><![CDATA[<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources</artifactId>
<version>${hapi_stable_version}</version>
</dependency>]]></source>
</subsection>
</section>
@ -153,10 +175,16 @@
<source><![CDATA[compile 'ca.uhn.hapi.fhir:hapi-fhir-base:${hapi_stable_version}'
compile 'ca.uhn.hapi.fhir:hapi-fhir-structures-dstu:${hapi_stable_version}']]></source>
<p>
DSTU2:
DSTU2 (HAPI):
</p>
<source><![CDATA[compile 'ca.uhn.hapi.fhir:hapi-fhir-base:${hapi_stable_version}'
compile 'ca.uhn.hapi.fhir:hapi-fhir-structures-dstu2:${hapi_stable_version}']]></source>
<p>
DSTU2 (RI):
</p>
<source><![CDATA[compile 'ca.uhn.hapi.fhir:hapi-fhir-base:${hapi_stable_version}'
compile 'ca.uhn.hapi.fhir:hapi-fhir-structures-hl7org-dstu2:${hapi_stable_version}'
compile 'ca.uhn.hapi.fhir:hapi-fhir-validation-resources:${hapi_stable_version}']]></source>
</section>
<section name="Using Snapshot Builds">