Improve JPA response messages (#4293)

* Improve JPA response messages

* Fixes

* Test fixes

* Test fixes

* Ongoing testing

* Work on messages

* Work on messages

* Add valueset

* Add response code enum

* Version bump

* Undo bump

* Improve changelog

* Test fixes

* Add javadocs

* Version bump HAPI

* Test fixes

* Test fix

* Test fixes

* Test fixes

* Account for review changes

* Test fix

* Docs fix

* Work on API

* Improve SqlQuery api
James Agnew 2022-11-28 07:52:12 -05:00 committed by GitHub
parent c3697a5f87
commit 50ca94eded
136 changed files with 3134 additions and 1082 deletions

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -22,10 +22,13 @@ package ca.uhn.fhir.context.support;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
@@ -321,6 +324,19 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
} else {
ourLog.warn("Unable to load resource: {}", theClasspath);
}
// Load built-in system
if (myCtx.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
String storageCodeEnum = ClasspathUtil.loadResource("org/hl7/fhir/common/hapi/validation/support/HapiFhirStorageResponseCode.json");
IBaseResource storageCodeCodeSystem = myCtx.newJsonParser().setParserErrorHandler(new LenientErrorHandler()).parseResource(storageCodeEnum);
String url = myCtx.newTerser().getSinglePrimitiveValueOrNull(storageCodeCodeSystem, "url");
theCodeSystems.put(url, storageCodeCodeSystem);
}
}
private void loadStructureDefinitions(Map<String, IBaseResource> theCodeSystems, String theClasspath) {
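Since the new CodeSystem now ships with the core validation support, a quick way to see it is to fetch it by URL. This is a sketch only (not part of the commit), assuming the JSON resource is on the classpath as wired above:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class StorageResponseCodeSystemLookup {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		DefaultProfileValidationSupport support = new DefaultProfileValidationSupport(ctx);

		// URL matches the SYSTEM constant declared on StorageResponseCodeEnum (added later in this commit)
		IBaseResource codeSystem = support.fetchCodeSystem("https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code");
		if (codeSystem != null) {
			System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem));
		}
	}
}
```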

View File

@@ -0,0 +1,29 @@
package ca.uhn.fhir.model.api;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface ICodingEnum {
String getCode();
String getSystem();
String getDisplay();
}

View File

@@ -0,0 +1,72 @@
package ca.uhn.fhir.model.api;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* This enum contains the allowable codes in the HAPI FHIR defined
* codesystem: https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code
*
* This is used in CRUD response OperationOutcome resources.
*/
public enum StorageResponseCodeEnum implements ICodingEnum {
SUCCESSFUL_CREATE("Create succeeded."),
SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH("Conditional create succeeded: no existing resource matched the conditional URL."),
SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH("Conditional create succeeded: an existing resource matched the conditional URL so no action was taken."),
SUCCESSFUL_UPDATE("Update succeeded."),
SUCCESSFUL_UPDATE_AS_CREATE("Update as create succeeded."),
SUCCESSFUL_UPDATE_NO_CHANGE("Update succeeded: No changes were detected so no action was taken."),
SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH("Conditional update succeeded: no existing resource matched the conditional URL so a new resource was created."),
SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH("Conditional update succeeded: an existing resource matched the conditional URL and was updated."),
SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE("Conditional update succeeded: an existing resource matched the conditional URL and was updated, but no changes were detected so no action was taken."),
SUCCESSFUL_DELETE("Delete succeeded."),
SUCCESSFUL_DELETE_ALREADY_DELETED("Delete succeeded: Resource was already deleted so no action was taken."),
SUCCESSFUL_DELETE_NOT_FOUND("Delete succeeded: No existing resource was found so no action was taken."),
SUCCESSFUL_PATCH("Patch succeeded."),
SUCCESSFUL_PATCH_NO_CHANGE("Patch succeeded: No changes were detected so no action was taken."),
SUCCESSFUL_CONDITIONAL_PATCH("Conditional patch succeeded."),
SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE("Conditional patch succeeded: No changes were detected so no action was taken.");
public static final String SYSTEM = "https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code";
private final String myDisplay;
StorageResponseCodeEnum(String theDisplay) {
myDisplay = theDisplay;
}
@Override
public String getCode() {
return name();
}
@Override
public String getSystem() {
return SYSTEM;
}
@Override
public String getDisplay() {
return myDisplay;
}
}
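As a quick illustration of how these codes are meant to surface in CRUD responses, the enum maps directly onto an `OperationOutcome.issue.details.coding` using the standard R4 model classes. This is a sketch only, not code from this commit; the diagnostics text is an assumption here (the JPA server fills it from its own message bundle):

```java
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.OperationOutcome;

public class StorageResponseCodeSketch {

	// Builds an OperationOutcome issue carrying the machine-processable storage response code
	public static OperationOutcome outcomeFor(StorageResponseCodeEnum theCode, String theDiagnostics) {
		OperationOutcome oo = new OperationOutcome();
		OperationOutcome.OperationOutcomeIssueComponent issue = oo.addIssue();
		issue.setSeverity(OperationOutcome.IssueSeverity.INFORMATION);
		issue.setCode(OperationOutcome.IssueType.INFORMATIONAL);
		issue.setDiagnostics(theDiagnostics); // human-readable message (assumed value)
		issue.setDetails(new CodeableConcept().addCoding(new Coding()
			.setSystem(theCode.getSystem())   // https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code
			.setCode(theCode.getCode())       // e.g. SUCCESSFUL_UPDATE_NO_CHANGE
			.setDisplay(theCode.getDisplay())));
		return oo;
	}
}
```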

View File

@@ -29,9 +29,12 @@ import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.annotation.Nonnull;
import java.util.Objects;
/**
@@ -40,7 +43,7 @@ import java.util.Objects;
 * (method and search).
 *
 * <p>
- *
+ * <p>
 * This is not yet complete, and doesn't support all FHIR features. <b>USE WITH CAUTION</b> as the API
 * may change.
 *
@@ -101,10 +104,8 @@ public class BundleBuilder {
/**
 * Sets the specified primitive field on the bundle with the value provided.
 *
- * @param theFieldName
- *    Name of the primitive field.
- * @param theFieldValue
- *    Value of the field to be set.
+ * @param theFieldName Name of the primitive field.
+ * @param theFieldValue Value of the field to be set.
 */
public BundleBuilder setBundleField(String theFieldName, String theFieldValue) {
BaseRuntimeChildDefinition typeChild = myBundleDef.getChildByName(theFieldName);
@@ -119,12 +120,9 @@ public class BundleBuilder {
/**
 * Sets the specified primitive field on the search entry with the value provided.
 *
- * @param theSearch
- *    Search part of the entry
- * @param theFieldName
- *    Name of the primitive field.
- * @param theFieldValue
- *    Value of the field to be set.
+ * @param theSearch Search part of the entry
+ * @param theFieldName Name of the primitive field.
+ * @param theFieldValue Value of the field to be set.
 */
public BundleBuilder setSearchField(IBase theSearch, String theFieldName, String theFieldValue) {
BaseRuntimeChildDefinition typeChild = mySearchDef.getChildByName(theFieldName);
@@ -144,6 +142,37 @@ public class BundleBuilder {
return this;
}
/**
* Adds a FHIRPatch patch bundle to the transaction
* @param theTarget The target resource ID to patch
* @param thePatch The FHIRPath Parameters resource
* @since 6.3.0
*/
public PatchBuilder addTransactionFhirPatchEntry(IIdType theTarget, IBaseParameters thePatch) {
Validate.notNull(theTarget, "theTarget must not be null");
Validate.notBlank(theTarget.getResourceType(), "theTarget must contain a resource type");
Validate.notBlank(theTarget.getIdPart(), "theTarget must contain an ID");
IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(thePatch, theTarget.getValue(), theTarget.toUnqualifiedVersionless().getValue(), "PATCH");
return new PatchBuilder(url);
}
/**
* Adds a FHIRPatch patch bundle to the transaction. This method is intended for conditional PATCH operations. If you
* know the ID of the resource you wish to patch, use {@link #addTransactionFhirPatchEntry(IIdType, IBaseParameters)}
* instead.
*
* @param thePatch The FHIRPath Parameters resource
* @since 6.3.0
* @see #addTransactionFhirPatchEntry(IIdType, IBaseParameters)
*/
public PatchBuilder addTransactionFhirPatchEntry(IBaseParameters thePatch) {
IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(thePatch, null, null, "PATCH");
return new PatchBuilder(url);
}
/**
 * Adds an entry containing an update (PUT) request.
 * Also sets the Bundle.type value to "transaction" if it is not already set.
@@ -151,22 +180,39 @@ public class BundleBuilder {
 * @param theResource The resource to update
 */
public UpdateBuilder addTransactionUpdateEntry(IBaseResource theResource) {
Validate.notNull(theResource, "theResource must not be null");
IIdType id = theResource.getIdElement();
if (id.hasIdPart() && !id.hasResourceType()) {
String resourceType = myContext.getResourceType(theResource);
id = id.withResourceType(resourceType);
}
String requestUrl = id.toUnqualifiedVersionless().getValue();
String fullUrl = id.getValue();
String verb = "PUT";
IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(theResource, fullUrl, requestUrl, verb);
return new UpdateBuilder(url);
}
@Nonnull
private IPrimitiveType<?> addAndPopulateTransactionBundleEntryRequest(IBaseResource theResource, String theFullUrl, String theRequestUrl, String theHttpVerb) {
setBundleField("type", "transaction");
-IBase request = addEntryAndReturnRequest(theResource);
+IBase request = addEntryAndReturnRequest(theResource, theFullUrl);
// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-String resourceType = myContext.getResourceType(theResource);
-url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
+url.setValueAsString(theRequestUrl);
myEntryRequestUrlChild.getMutator().setValue(request, url);
-// Bundle.entry.request.url
+// Bundle.entry.request.method
IPrimitiveType<?> method = (IPrimitiveType<?>) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments());
-method.setValueAsString("PUT");
+method.setValueAsString(theHttpVerb);
myEntryRequestMethodChild.getMutator().setValue(request, method);
-return new UpdateBuilder(url);
+return url;
}
/**
@@ -178,7 +224,7 @@ public class BundleBuilder {
public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
setBundleField("type", "transaction");
-IBase request = addEntryAndReturnRequest(theResource);
+IBase request = addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
String resourceType = myContext.getResourceType(theResource);
@@ -198,15 +244,30 @@ public class BundleBuilder {
/**
 * Adds an entry containing a delete (DELETE) request.
 * Also sets the Bundle.type value to "transaction" if it is not already set.
- *
+ * <p>
 * Note that the resource is only used to extract its ID and type, and the body of the resource is not included in the entry,
 *
 * @param theResource The resource to delete.
 */
-public void addTransactionDeleteEntry(IBaseResource theResource) {
+public DeleteBuilder addTransactionDeleteEntry(IBaseResource theResource) {
String resourceType = myContext.getResourceType(theResource);
String idPart = theResource.getIdElement().toUnqualifiedVersionless().getIdPart();
-addTransactionDeleteEntry(resourceType, idPart);
+return addTransactionDeleteEntry(resourceType, idPart);
}
/**
* Adds an entry containing a delete (DELETE) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.
* <p>
* Note that the resource is only used to extract its ID and type, and the body of the resource is not included in the entry,
*
* @param theResourceId The resource ID to delete.
* @return
*/
public DeleteBuilder addTransactionDeleteEntry(IIdType theResourceId) {
String resourceType = theResourceId.getResourceType();
String idPart = theResourceId.getIdPart();
return addTransactionDeleteEntry(resourceType, idPart);
}
/**
@@ -216,22 +277,43 @@ public class BundleBuilder {
 * @param theResourceType The type resource to delete.
 * @param theIdPart the ID of the resource to delete.
 */
-public void addTransactionDeleteEntry(String theResourceType, String theIdPart) {
+public DeleteBuilder addTransactionDeleteEntry(String theResourceType, String theIdPart) {
setBundleField("type", "transaction");
-IBase request = addEntryAndReturnRequest();
IdDt idDt = new IdDt(theIdPart);
String deleteUrl = idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue();
return addDeleteEntry(deleteUrl);
}
/**
* Adds an entry containing a delete (DELETE) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.
*
* @param theMatchUrl The match URL, e.g. <code>Patient?identifier=http://foo|123</code>
* @since 6.3.0
*/
public BaseOperationBuilder addTransactionDeleteEntryConditional(String theMatchUrl) {
Validate.notBlank(theMatchUrl, "theMatchUrl must not be null or blank");
return addDeleteEntry(theMatchUrl);
}
@Nonnull
private DeleteBuilder addDeleteEntry(String theDeleteUrl) {
IBase request = addEntryAndReturnRequest();
// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-url.setValueAsString(idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue());
+url.setValueAsString(theDeleteUrl);
myEntryRequestUrlChild.getMutator().setValue(request, url);
// Bundle.entry.request.method
IPrimitiveType<?> method = (IPrimitiveType<?>) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments());
method.setValueAsString("DELETE");
myEntryRequestMethodChild.getMutator().setValue(request, method);
return new DeleteBuilder();
}
/**
@@ -239,14 +321,13 @@
 */
public void addCollectionEntry(IBaseResource theResource) {
setType("collection");
-addEntryAndReturnRequest(theResource);
+addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
}
/**
 * Creates new entry and adds it to the bundle
 *
- * @return
- *    Returns the new entry.
+ * @return Returns the new entry.
 */
public IBase addEntry() {
IBase entry = myEntryDef.newInstance();
@@ -258,8 +339,7 @@
 * Creates new search instance for the specified entry
 *
 * @param entry Entry to create search instance for
- * @return
- *    Returns the search instance
+ * @return Returns the search instance
 */
public IBaseBackboneElement addSearch(IBase entry) {
IBase searchInstance = mySearchDef.newInstance();
@@ -267,19 +347,14 @@
return (IBaseBackboneElement) searchInstance;
}
-/**
- *
- * @param theResource
- * @return
- */
-public IBase addEntryAndReturnRequest(IBaseResource theResource) {
+private IBase addEntryAndReturnRequest(IBaseResource theResource, String theFullUrl) {
Validate.notNull(theResource, "theResource must not be null");
IBase entry = addEntry();
// Bundle.entry.fullUrl
IPrimitiveType<?> fullUrl = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-fullUrl.setValueAsString(theResource.getIdElement().getValue());
+fullUrl.setValueAsString(theFullUrl);
myEntryFullUrlChild.getMutator().setValue(entry, fullUrl);
// Bundle.entry.resource
@@ -306,6 +381,15 @@
return myBundle;
}
/**
* Convenience method which auto-casts the results of {@link #getBundle()}
*
* @since 6.3.0
*/
public <T extends IBaseBundle> T getBundleTyped() {
return (T) myBundle;
}
public BundleBuilder setMetaField(String theFieldName, IBase theFieldValue) {
BaseRuntimeChildDefinition.IMutator mutator = myMetaDef.getChildByName(theFieldName).getMutator();
mutator.setValue(myBundle.getMeta(), theFieldValue);
@@ -315,12 +399,9 @@
/**
 * Sets the specified entry field.
 *
- * @param theEntry
- *    The entry instance to set values on
- * @param theEntryChildName
- *    The child field name of the entry instance to be set
- * @param theValue
- *    The field value to set
+ * @param theEntry The entry instance to set values on
+ * @param theEntryChildName The child field name of the entry instance to be set
+ * @param theValue The field value to set
 */
public void addToEntry(IBase theEntry, String theEntryChildName, IBase theValue) {
addToBase(theEntry, theEntryChildName, theValue, myEntryDef);
@@ -329,12 +410,9 @@
/**
 * Sets the specified search field.
 *
- * @param theSearch
- *    The search instance to set values on
- * @param theSearchFieldName
- *    The child field name of the search instance to be set
- * @param theSearchFieldValue
- *    The field value to set
+ * @param theSearch The search instance to set values on
+ * @param theSearchFieldName The child field name of the search instance to be set
+ * @param theSearchFieldValue The field value to set
 */
public void addToSearch(IBase theSearch, String theSearchFieldName, IBase theSearchFieldValue) {
addToBase(theSearch, theSearchFieldName, theSearchFieldValue, mySearchDef);
@@ -349,12 +427,9 @@
/**
 * Creates a new primitive.
 *
- * @param theTypeName
- *    The element type for the primitive
- * @param <T>
- *    Actual type of the parameterized primitive type interface
- * @return
- *    Returns the new empty instance of the element definition.
+ * @param theTypeName The element type for the primitive
+ * @param <T> Actual type of the parameterized primitive type interface
+ * @return Returns the new empty instance of the element definition.
 */
public <T> IPrimitiveType<T> newPrimitive(String theTypeName) {
BaseRuntimeElementDefinition primitiveDefinition = myContext.getElementDefinition(theTypeName);
@@ -365,14 +440,10 @@
/**
 * Creates a new primitive instance of the specified element type.
 *
- * @param theTypeName
- *    Element type to create
- * @param theInitialValue
- *    Initial value to be set on the new instance
- * @param <T>
- *    Actual type of the parameterized primitive type interface
- * @return
- *    Returns the newly created instance
+ * @param theTypeName Element type to create
+ * @param theInitialValue Initial value to be set on the new instance
+ * @param <T> Actual type of the parameterized primitive type interface
+ * @return Returns the newly created instance
 */
public <T> IPrimitiveType<T> newPrimitive(String theTypeName, T theInitialValue) {
IPrimitiveType<T> retVal = newPrimitive(theTypeName);
@@ -389,38 +460,84 @@
setBundleField("type", theType);
}
-public static class UpdateBuilder {
-private final IPrimitiveType<?> myUrl;
-public UpdateBuilder(IPrimitiveType<?> theUrl) {
-myUrl = theUrl;
-}
-/**
- * Make this update a Conditional Update
- */
-public void conditional(String theConditionalUrl) {
-myUrl.setValueAsString(theConditionalUrl);
-}
-}
-public class CreateBuilder {
public class DeleteBuilder extends BaseOperationBuilder {
// nothing yet
}
public class PatchBuilder extends BaseOperationBuilderWithConditionalUrl<PatchBuilder> {
PatchBuilder(IPrimitiveType<?> theUrl) {
super(theUrl);
}
}
public class UpdateBuilder extends BaseOperationBuilderWithConditionalUrl<UpdateBuilder> {
UpdateBuilder(IPrimitiveType<?> theUrl) {
super(theUrl);
}
}
public class CreateBuilder extends BaseOperationBuilder {
private final IBase myRequest;
-public CreateBuilder(IBase theRequest) {
+CreateBuilder(IBase theRequest) {
myRequest = theRequest;
}
/**
 * Make this create a Conditional Create
 */
-public void conditional(String theConditionalUrl) {
+public CreateBuilder conditional(String theConditionalUrl) {
BaseRuntimeElementDefinition<?> stringDefinition = Objects.requireNonNull(myContext.getElementDefinition("string"));
IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) stringDefinition.newInstance();
ifNoneExist.setValueAsString(theConditionalUrl);
myEntryRequestIfNoneExistChild.getMutator().setValue(myRequest, ifNoneExist);
return this;
}
}
public abstract class BaseOperationBuilder {
/**
* Returns a reference to the BundleBuilder instance.
*
 * Calling this method has no effect at all; it is only
 * provided for easy method chaining if you want to build
* your bundle as a single fluent call.
*
* @since 6.3.0
*/
public BundleBuilder andThen() {
return BundleBuilder.this;
}
}
public abstract class BaseOperationBuilderWithConditionalUrl<T extends BaseOperationBuilder> extends BaseOperationBuilder {
private final IPrimitiveType<?> myUrl;
BaseOperationBuilderWithConditionalUrl(IPrimitiveType<?> theUrl) {
myUrl = theUrl;
}
/**
* Make this update a Conditional Update
*/
@SuppressWarnings("unchecked")
public T conditional(String theConditionalUrl) {
myUrl.setValueAsString(theConditionalUrl);
return (T) this;
}
}
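For reference, here is a minimal sketch of how the new fluent entries compose. This is my own example, not code from the commit; the target IDs, conditional URL, and patch contents are made up for illustration:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;

public class BundleBuilderFluentSketch {

	public static IBaseBundle buildTransaction() {
		FhirContext ctx = FhirContext.forR4();

		// A FHIRPatch Parameters resource: replace Patient.active with false
		Parameters patch = new Parameters();
		Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
		op.addPart().setName("type").setValue(new CodeType("replace"));
		op.addPart().setName("path").setValue(new CodeType("Patient.active"));
		op.addPart().setName("value").setValue(new BooleanType(false));

		// Chain several of the new entry types into one transaction using andThen()
		BundleBuilder builder = new BundleBuilder(ctx);
		builder.addTransactionFhirPatchEntry(new IdType("Patient/123"), patch)
			.andThen()
			.addTransactionDeleteEntry(new IdType("Observation/456"))
			.andThen()
			.addTransactionDeleteEntryConditional("Encounter?identifier=http://foo|789");

		return builder.getBundle();
	}
}
```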

View File

@@ -29,11 +29,13 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.annotation.Nullable;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -53,8 +55,12 @@ public class OperationOutcomeUtil {
 * @return Returns the newly added issue
 */
public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode) {
return addIssue(theCtx, theOperationOutcome, theSeverity, theDetails, theLocation, theCode, null, null, null);
}
public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode, @Nullable String theDetailSystem, @Nullable String theDetailCode, @Nullable String theDetailDescription) {
IBase issue = createIssue(theCtx, theOperationOutcome);
-populateDetails(theCtx, issue, theSeverity, theDetails, theLocation, theCode);
+populateDetails(theCtx, issue, theSeverity, theDetails, theLocation, theCode, theDetailSystem, theDetailCode, theDetailDescription);
return issue;
}
@@ -127,17 +133,17 @@
}
}
-private static void populateDetails(FhirContext theCtx, IBase theIssue, String theSeverity, String theDetails, String theLocation, String theCode) {
+private static void populateDetails(FhirContext theCtx, IBase theIssue, String theSeverity, String theDetails, String theLocation, String theCode, String theDetailSystem, String theDetailCode, String theDetailDescription) {
BaseRuntimeElementCompositeDefinition<?> issueElement = (BaseRuntimeElementCompositeDefinition<?>) theCtx.getElementDefinition(theIssue.getClass());
-BaseRuntimeChildDefinition detailsChild;
-detailsChild = issueElement.getChildByName("diagnostics");
+BaseRuntimeChildDefinition diagnosticsChild;
+diagnosticsChild = issueElement.getChildByName("diagnostics");
BaseRuntimeChildDefinition codeChild = issueElement.getChildByName("code");
IPrimitiveType<?> codeElem = (IPrimitiveType<?>) codeChild.getChildByName("code").newInstance(codeChild.getInstanceConstructorArguments());
codeElem.setValueAsString(theCode);
codeChild.getMutator().addValue(theIssue, codeElem);
-BaseRuntimeElementDefinition<?> stringDef = detailsChild.getChildByName(detailsChild.getElementName());
+BaseRuntimeElementDefinition<?> stringDef = diagnosticsChild.getChildByName(diagnosticsChild.getElementName());
BaseRuntimeChildDefinition severityChild = issueElement.getChildByName("severity");
IPrimitiveType<?> severityElem = (IPrimitiveType<?>) severityChild.getChildByName("severity").newInstance(severityChild.getInstanceConstructorArguments());
@@ -146,9 +152,27 @@
IPrimitiveType<?> string = (IPrimitiveType<?>) stringDef.newInstance();
string.setValueAsString(theDetails);
-detailsChild.getMutator().setValue(theIssue, string);
+diagnosticsChild.getMutator().setValue(theIssue, string);
addLocationToIssue(theCtx, theIssue, theLocation);
if (isNotBlank(theDetailSystem)) {
BaseRuntimeChildDefinition detailsChild = issueElement.getChildByName("details");
if (detailsChild != null) {
BaseRuntimeElementDefinition<?> codeableConceptDef = theCtx.getElementDefinition("CodeableConcept");
IBase codeableConcept = codeableConceptDef.newInstance();
BaseRuntimeElementDefinition<?> codingDef = theCtx.getElementDefinition("Coding");
IBaseCoding coding = (IBaseCoding) codingDef.newInstance();
coding.setSystem(theDetailSystem);
coding.setCode(theDetailCode);
coding.setDisplay(theDetailDescription);
codeableConceptDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);
detailsChild.getMutator().addValue(theIssue, codeableConcept);
}
}
}
public static void addLocationToIssue(FhirContext theContext, IBase theIssue, String theLocation) {

View File

@@ -99,10 +99,22 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidParameterChain=Invalid parameter chain
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidVersion=Version "{0}" is not valid for resource {1}
ca.uhn.fhir.jpa.dao.BaseStorageDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody=No body was supplied in request
-ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Nothing has been deleted.
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}" in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}".
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreateConditionalNoMatch=Successfully conditionally created resource "{0}". No existing resources matched URL "{1}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreateConditionalWithMatch=Successfully conditionally created resource "{0}". Existing resource matched URL "{1}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatch=Successfully patched resource "{0}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchNoChange=Successfully patched resource "{0}" with no changes detected.
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchConditional=Successfully conditionally patched resource. Existing resource {0} matched URL: {1}.
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchConditionalNoChange=Successfully conditionally patched resource with no changes detected. Existing resource {0} matched URL: {1}.
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateNoChange=Successfully updated resource "{0}" with no changes detected.
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateAsCreate=Successfully created resource "{0}" using update as create (ie. create with client assigned ID).
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalWithMatch=Successfully conditionally updated resource "{0}". Existing resource matched URL "{1}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalNoMatch=Successfully conditionally updated resource "{0}". Created resource because no existing resource matched URL "{1}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalNoChangeWithMatch=Successfully conditionally updated resource "{0}" with no changes detected. Existing resource matched URL "{1}".
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s).
ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulTimingSuffix=Took {0}ms.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceNotExisting=Not deleted, resource {0} does not exist.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceAlreadyDeleted=Not deleted, resource {0} was already deleted.
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
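For context, these messages use positional placeholders. Below is a minimal sketch, illustrative only and not HAPI's internal localization code, of how a message plus the new timing suffix could be assembled; the resource ID and timing value are made up:

```java
import java.text.MessageFormat;

public class StorageMessageSketch {
	public static void main(String[] args) {
		// Message patterns copied from the properties above
		String successfulUpdate = "Successfully updated resource \"{0}\".";
		String timingSuffix = "Took {0}ms.";

		String message = MessageFormat.format(successfulUpdate, "Patient/123/_history/2")
			+ " " + MessageFormat.format(timingSuffix, 14);
		System.out.println(message);
		// Prints: Successfully updated resource "Patient/123/_history/2". Took 14ms.
	}
}
```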

View File

@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -26,6 +26,11 @@ import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import java.math.BigDecimal;
@@ -109,6 +114,53 @@ public class BundleBuilderExamples {
//END SNIPPET: createConditional
}
public void patch() throws FHIRException {
//START SNIPPET: patch
// Create a FHIR Patch object
Parameters patch = new Parameters();
Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
op.addPart().setName("type").setValue(new CodeType("replace"));
op.addPart().setName("path").setValue(new CodeType("Patient.active"));
op.addPart().setName("value").setValue(new BooleanType(false));
// Create a TransactionBuilder
BundleBuilder builder = new BundleBuilder(myFhirContext);
// Create a target object (this is the ID of the resource that will be patched)
IIdType targetId = new IdType("Patient/123");
// Add the patch to the bundle
builder.addTransactionFhirPatchEntry(targetId, patch);
// Execute the transaction
IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute();
//END SNIPPET: patch
}
public void patchConditional() throws FHIRException {
//START SNIPPET: patchConditional
// Create a FHIR Patch object
Parameters patch = new Parameters();
Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
op.addPart().setName("type").setValue(new CodeType("replace"));
op.addPart().setName("path").setValue(new CodeType("Patient.active"));
op.addPart().setName("value").setValue(new BooleanType(false));
// Create a TransactionBuilder
BundleBuilder builder = new BundleBuilder(myFhirContext);
// Add the patch to the bundle with a conditional URL
String conditionalUrl = "Patient?identifier=http://foo|123";
builder.addTransactionFhirPatchEntry(patch).conditional(conditionalUrl);
// Execute the transaction
IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute();
//END SNIPPET: patchConditional
}
public void customizeBundle() throws FHIRException {
//START SNIPPET: customizeBundle
// Create a TransactionBuilder

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 4293
title: "The BundleBuilder now supports adding conditional
DELETE operations, PATCH operations, and conditional PATCH
operations to a transaction bundle."

View File

@@ -0,0 +1,11 @@
---
type: add
issue: 4293
title: "When performing create/update/patch/delete operations against the JPA server, the response
OperationOutcome will now include additional details about the outcome of the operation. This
includes:
<ul>
<li>For updates, the message will indicate that the update did not contain any changes (i.e. a no-op)</li>
<li>For conditional creates/updates/deletes, the message will indicate whether the conditional URL matched any existing resources and the outcome of the operation.</li>
<li>A new coding has been added to the <code>OperationOutcome.issue.details.coding</code> containing a machine processable equivalent to the outcome.</li>
</ul>"

View File

@@ -0,0 +1,7 @@
---
type: add
issue: 4293
title: "When updating resources using a FHIR transaction in the JPA server, if the
client instructs the server to include the resource body in the response, any
tags that have been carried forward from previous versions of the resource are
now included in the response."

View File

@@ -36,7 +36,23 @@ If you want to perform a conditional update:
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|updateConditional}}
```
-# Customizing bundle
+# Transaction Patch
To add a PATCH operation to a transaction bundle:
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|patch}}
```
## Conditional Patch
If you want to perform a conditional patch:
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|patchConditional}}
```
# Customizing the Bundle
If you want to manipulate a bundle:

View File

@@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@@ -39,14 +39,16 @@ public class SqlQuery {
private final StackTraceElement[] myStackTrace;
private final int mySize;
private final LanguageEnum myLanguage;
private final String myNamespace;
public SqlQuery(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize) {
-this(theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL);
+this(null, theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL);
}
-public SqlQuery(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage) {
+public SqlQuery(String theNamespace, String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage) {
Validate.notNull(theLanguage, "theLanguage must not be null");
myNamespace = theNamespace;
mySql = theSql;
myParams = Collections.unmodifiableList(theParams);
myQueryTimestamp = theQueryTimestamp;
@@ -56,6 +58,10 @@
myLanguage = theLanguage;
}
public String getNamespace() {
return myNamespace;
}
public long getQueryTimestamp() {
return myQueryTimestamp;
}
@@ -118,6 +124,10 @@
return mySize;
}
@Override
public String toString() {
return getSql(true, true);
}
public enum LanguageEnum {
@@ -125,9 +135,4 @@
JSON
}
-@Override
-public String toString() {
-return getSql(true, true);
-}
}

View File

@@ -384,7 +384,7 @@ public class TestUtil {
}
public static void sleepOneClick() {
-ca.uhn.fhir.util.TestUtil.sleepAtLeast(1);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(1, false);
}

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.3.1-SNAPSHOT</version>
+<version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSqlBuilder;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSvcImpl;
import ca.uhn.fhir.jpa.reindex.Batch2DaoSvcImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import javax.persistence.EntityManager;
@@ -43,7 +44,7 @@ public class Batch2SupportConfig {
}
@Bean
-public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, IFulltextSearchSvc theFullTextSearchSvc) {
+public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) {
return new DeleteExpungeSvcImpl(theEntityManager, theDeleteExpungeSqlBuilder, theFullTextSearchSvc);
}

View File

@@ -25,7 +25,9 @@ import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@@ -230,7 +232,6 @@ public class JpaConfig {
return new ResponseTerminologyTranslationInterceptor(theValidationSupport, theResponseTerminologyTranslationSvc);
}
-@Lazy
@Bean
public ResponseTerminologyTranslationSvc responseTerminologyTranslationSvc(IValidationSupport theValidationSupport) {
return new ResponseTerminologyTranslationSvc(theValidationSupport);
@@ -265,6 +266,11 @@
return new ValueSetOperationProvider();
}
@Bean
public IJpaStorageResourceParser jpaStorageResourceParser() {
return new JpaStorageResourceParser();
}
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();

View File

@@ -16,6 +16,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IJpaDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
@@ -29,8 +30,6 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
-import ca.uhn.fhir.jpa.entity.Search;
-import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
@@ -45,11 +44,9 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
-import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
@@ -60,6 +57,7 @@ import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
@@ -71,14 +69,12 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
-import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
@@ -86,6 +82,8 @@ import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.MetaUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.XmlUtil; import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
@ -148,11 +146,9 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import java.util.UUID;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -181,9 +177,18 @@ import static org.apache.commons.lang3.StringUtils.trim;
* #L% * #L%
*/ */
/**
* TODO: JA - This class has only one subclass now. Historically it was a common
* ancestor for BaseHapiFhirSystemDao and BaseHapiFhirResourceDao but I've untangled
* the former from this hierarchy in order to simplify moving common functionality
* for resource DAOs into the hapi-fhir-storage project. This class should be merged
* into BaseHapiFhirResourceDao, but that should be done in its own dedicated PR
* since it'll be a noisy change.
*/
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
@Repository @Repository
public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageDao implements IDao, IJpaDao<T>, ApplicationContextAware { public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageResourceDao<T> implements IDao, IJpaDao<T>, ApplicationContextAware {
public static final long INDEX_STATUS_INDEXED = 1L; public static final long INDEX_STATUS_INDEXED = 1L;
public static final long INDEX_STATUS_INDEXING_FAILED = 2L; public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
@ -233,8 +238,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired @Autowired
private PartitionSettings myPartitionSettings; private PartitionSettings myPartitionSettings;
@Autowired @Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired
private IPartitionLookupSvc myPartitionLookupSvc; private IPartitionLookupSvc myPartitionLookupSvc;
@Autowired @Autowired
private MemoryCacheService myMemoryCacheService; private MemoryCacheService myMemoryCacheService;
@ -243,6 +246,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired @Autowired
private PlatformTransactionManager myTransactionManager; private PlatformTransactionManager myTransactionManager;
@Autowired
protected IJpaStorageResourceParser myJpaStorageResourceParser;
@VisibleForTesting @VisibleForTesting
public void setSearchParamPresenceSvc(ISearchParamPresenceSvc theSearchParamPresenceSvc) { public void setSearchParamPresenceSvc(ISearchParamPresenceSvc theSearchParamPresenceSvc) {
@ -371,14 +376,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myContext = theContext; myContext = theContext;
} }
public FhirContext getContext(FhirVersionEnum theVersion) {
Validate.notNull(theVersion, "theVersion must not be null");
if (theVersion == myFhirContext.getVersion().getVersion()) {
return myFhirContext;
}
return FhirContext.forCached(theVersion);
}
/** /**
* <code>null</code> will only be returned if the scheme and tag are both blank * <code>null</code> will only be returned if the scheme and tag are both blank
*/ */
@ -513,27 +510,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return retVal; return retVal;
} }
protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
}
protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
String resourceName = defaultIfBlank(theResourceType, null);
Search search = new Search();
search.setOffset(theOffset);
search.setDeleted(false);
search.setCreated(new Date());
search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);
search.setResourceId(theResourcePid);
search.setSearchType(SearchTypeEnum.HISTORY);
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);
return myPersistedJpaBundleProviderFactory.newInstance(theRequest, search);
}
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) { void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
String newVersion; String newVersion;
@ -796,133 +772,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return !allTagsOld.equals(allTagsNew); return !allTagsOld.equals(allTagsNew);
} }
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
retVal = (R) res;
ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
if (theForHistoryOperation) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
}
} else if (theForHistoryOperation) {
/*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
*/
Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue();
if (published.equals(updated)) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.POST);
} else {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.PUT);
}
}
res.setId(theEntity.getIdDt().withVersion(theVersion.toString()));
ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
ResourceMetadataKeyEnum.PUBLISHED.put(res, theEntity.getPublished());
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
if (theTagList != null) {
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
List<IdDt> profiles = new ArrayList<>();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
profiles.add(new IdDt(next.getTag().getCode()));
break;
case SECURITY_LABEL:
IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
secLabel.setSystem(next.getTag().getSystem());
secLabel.setCode(next.getTag().getCode());
secLabel.setDisplay(next.getTag().getDisplay());
securityLabels.add(secLabel);
break;
case TAG:
tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
break;
}
}
if (tagList.size() > 0) {
ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
}
if (securityLabels.size() > 0) {
ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
}
if (profiles.size() > 0) {
ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
}
}
}
return retVal;
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
retVal = (R) res;
ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
if (theForHistoryOperation) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.DELETE.toCode());
}
} else if (theForHistoryOperation) {
/*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
*/
Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue();
if (published.equals(updated)) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.POST.toCode());
} else {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.PUT.toCode());
}
}
res.getMeta().setLastUpdated(null);
res.getMeta().setVersionId(null);
updateResourceMetadata(theEntity, res);
res.setId(res.getIdElement().withVersion(theVersion.toString()));
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
if (theTagList != null) {
res.getMeta().getTag().clear();
res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
}
}
}
return retVal;
}
/** /**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been updated in the database * Subclasses may override to provide behaviour. Called when a pre-existing resource has been updated in the database
* *
@ -954,6 +803,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// nothing // nothing
} }
@Override
@CoverageIgnore @CoverageIgnore
public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) { public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) {
throw new NotImplementedException(Msg.code(927) + ""); throw new NotImplementedException(Msg.code(927) + "");
@ -1005,220 +855,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return metaSnapshotModeTokens.contains(theTag.getTag().getTagType()); return metaSnapshotModeTokens.contains(theTag.getTag().getTagType());
} }
@Override
public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
Class<? extends IBaseResource> resourceType = type.getImplementingClass();
return toResource(resourceType, theEntity, null, theForHistoryOperation);
}
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
String resourceText;
ResourceEncodingEnum resourceEncoding;
@Nullable
Collection<? extends BaseTag> tagList = Collections.emptyList();
long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;
if (theEntity instanceof ResourceHistoryTable) {
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
default:
if (history.isHasTags()) {
tagList = history.getTags();
}
break;
case NON_VERSIONED:
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
break;
case INLINE:
tagList = null;
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
ResourceHistoryTable history;
if (resource.getCurrentVersionEntity() != null) {
history = resource.getCurrentVersionEntity();
} else {
version = theEntity.getVersion();
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
((ResourceTable) theEntity).setCurrentVersionEntity(history);
while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
} else {
return null;
}
}
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
resourceText = history.getResourceTextVc();
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
} else {
tagList = Collections.emptyList();
}
break;
case INLINE:
tagList = null;
break;
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView view = (ResourceSearchView) theEntity;
resourceBytes = view.getResource();
resourceText = view.getResourceTextVc();
resourceEncoding = view.getEncoding();
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
tagList = theTagList;
} else {
tagList = Collections.emptyList();
}
break;
case INLINE:
tagList = null;
break;
}
} else {
// something wrong
return null;
}
// 2. get The text
String decodedResourceText;
if (resourceText != null) {
decodedResourceText = resourceText;
} else {
decodedResourceText = decodeResource(resourceBytes, resourceEncoding);
}
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
if (tagList != null) {
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
if (newType != null && theResourceType.isAssignableFrom(newType)) {
ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
resourceType = (Class<R>) newType;
break;
}
}
}
}
}
}
// 4. parse the text to FHIR
R retVal;
if (resourceEncoding != ResourceEncodingEnum.DEL) {
LenientErrorHandler errorHandler = new LenientErrorHandler(false).setErrorOnInvalidValue(false);
IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), errorHandler, theEntity.getId());
try {
retVal = parser.parseResource(resourceType, decodedResourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(myFhirContext.getResourceType(resourceType));
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(Msg.code(928) + msg, e);
}
} else {
retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
}
// 5. fill MetaData
retVal = populateResourceMetadata(theEntity, theForHistoryOperation, tagList, version, resourceType, retVal);
// 6. Handle source (provenance)
if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
+ (isNotBlank(provenanceRequestId) ? "#" : "")
+ defaultString(provenanceRequestId);
MetaUtil.setSource(myContext, retVal, sourceString);
}
// 7. Add partition information
if (myPartitionSettings.isPartitioningEnabled()) {
PartitionablePartitionId partitionId = theEntity.getPartitionId();
if (partitionId != null && partitionId.getPartitionId() != null) {
PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId());
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId());
} else {
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null);
}
}
return retVal;
}
protected <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntity, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, Class<R> theResourceType, R theResource) {
if (theResource instanceof IResource) {
IResource res = (IResource) theResource;
theResource = populateResourceMetadataHapi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
} else {
IAnyResource res = (IAnyResource) theResource;
theResource = populateResourceMetadataRi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
}
return theResource;
}
public String toResourceName(Class<? extends IBaseResource> theResourceType) {
return myContext.getResourceType(theResourceType);
}
String toResourceName(IBaseResource theResource) { String toResourceName(IBaseResource theResource) {
return myContext.getResourceType(theResource); return myContext.getResourceType(theResource);
@ -1375,7 +1013,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) { if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue()); ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue());
if (theResource != null) { if (theResource != null) {
updateResourceMetadata(entity, theResource); myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
} }
entity.setUnchangedInCurrentOperation(true); entity.setUnchangedInCurrentOperation(true);
return entity; return entity;
@ -1475,7 +1113,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} }
if (theResource != null) { if (theResource != null) {
updateResourceMetadata(entity, theResource); myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
} }
@ -1498,7 +1136,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (getConfig().isMassIngestionMode()) { if (getConfig().isMassIngestionMode()) {
oldResource = null; oldResource = null;
} else { } else {
oldResource = toResource(entity, false); oldResource = myJpaStorageResourceParser.toResource(entity, false);
} }
notifyInterceptors(theRequest, theResource, oldResource, theTransactionDetails, true); notifyInterceptors(theRequest, theResource, oldResource, theTransactionDetails, true);
@ -1510,7 +1148,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
historyEntity = ((ResourceTable) readEntity(theResourceId, theRequest)).getCurrentVersionEntity(); historyEntity = ((ResourceTable) readEntity(theResourceId, theRequest)).getCurrentVersionEntity();
// Update version/lastUpdated so that interceptors see the correct version // Update version/lastUpdated so that interceptors see the correct version
updateResourceMetadata(savedEntity, theResource); myJpaStorageResourceParser.updateResourceMetadata(savedEntity, theResource);
// Populate the PID in the resource, so it is available to hooks // Populate the PID in the resource, so it is available to hooks
addPidToResource(savedEntity, theResource); addPidToResource(savedEntity, theResource);
@ -1537,7 +1175,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (!changed && myConfig.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) { if (!changed && myConfig.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue()); ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue());
updateResourceMetadata(historyEntity, theResource); myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
return historyEntity; return historyEntity;
} }
@ -1556,7 +1194,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
historyEntity.setResourceTextVc(encodedResource.getResourceText()); historyEntity.setResourceTextVc(encodedResource.getResourceText());
myResourceHistoryTableDao.save(historyEntity); myResourceHistoryTableDao.save(historyEntity);
updateResourceMetadata(historyEntity, theResource); myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
return historyEntity; return historyEntity;
} }
@ -1586,14 +1224,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
/** /**
* TODO eventually consider refactoring this to be part of an interceptor. * TODO eventually consider refactoring this to be part of an interceptor.
* * <p>
* Throws an exception if the partition of the request and the partition of the existing entity do not match. * Throws an exception if the partition of the request and the partition of the existing entity do not match.
*
* @param theRequest the request. * @param theRequest the request.
* @param entity the existing entity. * @param entity the existing entity.
*/ */
private void failIfPartitionMismatch(RequestDetails theRequest, ResourceTable entity) { private void failIfPartitionMismatch(RequestDetails theRequest, ResourceTable entity) {
if (myPartitionSettings.isPartitioningEnabled() && theRequest != null && theRequest.getTenantId() != null && entity.getPartitionId() != null && if (myPartitionSettings.isPartitioningEnabled() && theRequest != null && theRequest.getTenantId() != null && entity.getPartitionId() != null &&
theRequest.getTenantId() != ALL_PARTITIONS_NAME) { !ALL_PARTITIONS_NAME.equals(theRequest.getTenantId())) {
PartitionEntity partitionEntity = myPartitionLookupSvc.getPartitionByName(theRequest.getTenantId()); PartitionEntity partitionEntity = myPartitionLookupSvc.getPartitionByName(theRequest.getTenantId());
//partitionEntity should never be null //partitionEntity should never be null
if (partitionEntity != null && !partitionEntity.getId().equals(entity.getPartitionId().getPartitionId())) { if (partitionEntity != null && !partitionEntity.getId().equals(entity.getPartitionId().getPartitionId())) {
@ -1668,8 +1307,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} }
@Override @Override
public ResourceTable updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion, public DaoMethodOutcome updateInternal(RequestDetails theRequestDetails, T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion,
IBasePersistedResource theEntity, IIdType theResourceId, IBaseResource theOldResource, TransactionDetails theTransactionDetails) { IBasePersistedResource theEntity, IIdType theResourceId, @Nullable IBaseResource theOldResource, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {
ResourceTable entity = (ResourceTable) theEntity; ResourceTable entity = (ResourceTable) theEntity;
@ -1696,7 +1335,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} }
// Update version/lastUpdated so that interceptors see the correct version // Update version/lastUpdated so that interceptors see the correct version
updateResourceMetadata(savedEntity, theResource); myJpaStorageResourceParser.updateResourceMetadata(savedEntity, theResource);
// Populate the PID in the resource so it is available to hooks // Populate the PID in the resource so it is available to hooks
addPidToResource(savedEntity, theResource); addPidToResource(savedEntity, theResource);
@ -1706,7 +1345,42 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
notifyInterceptors(theRequestDetails, theResource, theOldResource, theTransactionDetails, false); notifyInterceptors(theRequestDetails, theResource, theOldResource, theTransactionDetails, false);
} }
return savedEntity; Collection<? extends BaseTag> tagList = Collections.emptyList();
if (entity.isHasTags()) {
tagList = entity.getTags();
}
long version = entity.getVersion();
myJpaStorageResourceParser.populateResourceMetadata(entity, false, tagList, version, theResource);
boolean wasDeleted = false;
// NB: If this if-else ever gets collapsed, make sure to account for a possible null theOldResource (this will happen in mass-ingestion mode)
if (theOldResource instanceof IResource) {
wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) theOldResource) != null;
} else if (theOldResource instanceof IAnyResource) {
wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) theOldResource) != null;
}
DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted);
if (!thePerformIndexing) {
IIdType id = getContext().getVersion().newIdType();
id.setValue(entity.getIdDt().getValue());
outcome.setId(id);
}
// Only include a task timer if we're not in a sub-request (i.e. a transaction)
// since individual item times don't actually make much sense in the context
// of a transaction
StopWatch w = null;
if (theRequestDetails != null && !theRequestDetails.isSubRequest()) {
if (theTransactionDetails != null && !theTransactionDetails.isFhirTransaction()) {
w = new StopWatch(theTransactionDetails.getTransactionDate());
}
}
populateOperationOutcomeForUpdate(w, outcome, theMatchUrl, outcome.getOperationType());
return outcome;
} }
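Reviewer note: the reshaped updateInternal() above now returns a DaoMethodOutcome whose OperationOutcome carries both the timing message and a machine-readable storage response code. A rough caller-side sketch under those assumptions; the example class and the R4 Patient type are illustrative, not part of this diff.

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.r4.model.Patient;

// Illustration only: what a caller gets back from a plain update after this change.
public class UpdateOutcomeExample {

	public IBaseOperationOutcome updateAndInspect(IFhirResourceDao<Patient> theDao, Patient thePatient, RequestDetails theRequest) {
		DaoMethodOutcome outcome = theDao.update(thePatient, theRequest);
		// The OperationOutcome now carries a StorageResponseCodeEnum coding alongside
		// the human-readable diagnostics message
		return outcome.getOperationOutcome();
	}
}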
private void notifyInterceptors(RequestDetails theRequestDetails, T theResource, IBaseResource theOldResource, TransactionDetails theTransactionDetails, boolean isUnchanged) { private void notifyInterceptors(RequestDetails theRequestDetails, T theResource, IBaseResource theOldResource, TransactionDetails theTransactionDetails, boolean isUnchanged) {
@ -1735,26 +1409,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} }
} }
protected void updateResourceMetadata(IBaseResourceEntity theEntity, IBaseResource theResource) {
IIdType id = theEntity.getIdDt();
if (getContext().getVersion().getVersion().isRi()) {
id = getContext().getVersion().newIdType().setValue(id.getValue());
}
if (id.hasResourceType() == false) {
id = id.withResourceType(theEntity.getResourceType());
}
theResource.setId(id);
if (theResource instanceof IResource) {
ResourceMetadataKeyEnum.VERSION.put((IResource) theResource, id.getVersionIdPart());
ResourceMetadataKeyEnum.UPDATED.put((IResource) theResource, theEntity.getUpdated());
} else {
IBaseMetaType meta = theResource.getMeta();
meta.setVersionId(id.getVersionIdPart());
meta.setLastUpdated(theEntity.getUpdatedDate());
}
}
private void validateChildReferenceTargetTypes(IBase theElement, String thePath) { private void validateChildReferenceTargetTypes(IBase theElement, String thePath) {
if (theElement == null) { if (theElement == null) {
@ -1896,6 +1551,22 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myPartitionSettings = thePartitionSettings; myPartitionSettings = thePartitionSettings;
} }
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
private final TagDefinition myTagDefinition;
private final MemoryCacheService.TagDefinitionCacheKey myKey;
public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
myTagDefinition = theTagDefinition;
myKey = theKey;
}
@Override
public void afterCommit() {
myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
}
}
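Context note: this inner class only takes effect once registered with Spring's transaction synchronization machinery; inside the DAO the registration looks roughly like the fragment below. theKey and theTagDefinition are placeholders, and the surrounding tag-creation logic sits outside this hunk.

// import org.springframework.transaction.support.TransactionSynchronizationManager;
if (TransactionSynchronizationManager.isSynchronizationActive()) {
	// Defer the cache write until the enclosing transaction commits, so a rollback
	// cannot leave an uncommitted tag definition in the in-memory cache
	TransactionSynchronizationManager.registerSynchronization(
		new AddTagDefinitionToCacheAfterCommitSynchronization(theKey, theTagDefinition));
}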
@Nonnull @Nonnull
public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(TagTypeEnum theTagType, String theScheme, String theTerm) { public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(TagTypeEnum theTagType, String theScheme, String theTerm) {
return new MemoryCacheService.TagDefinitionCacheKey(theTagType, theScheme, theTerm); return new MemoryCacheService.TagDefinitionCacheKey(theTagType, theScheme, theTerm);
@ -1999,34 +1670,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest; ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest;
} }
private static List<BaseCodingDt> toBaseCodingList(List<IBaseCoding> theSecurityLabels) { /**
ArrayList<BaseCodingDt> retVal = new ArrayList<>(theSecurityLabels.size()); * Do not call this method outside of unit tests
for (IBaseCoding next : theSecurityLabels) { */
retVal.add((BaseCodingDt) next); @VisibleForTesting
} public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) {
return retVal; myJpaStorageResourceParser = theJpaStorageResourceParser;
}
public static void validateResourceType(BaseHasResource theEntity, String theResourceName) {
if (!theResourceName.equals(theEntity.getResourceType())) {
throw new ResourceNotFoundException(Msg.code(935) + "Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType());
}
}
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
private final TagDefinition myTagDefinition;
private final MemoryCacheService.TagDefinitionCacheKey myKey;
public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
myTagDefinition = theTagDefinition;
myKey = theKey;
}
@Override
public void afterCommit() {
myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
}
} }
} }
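Testing note: the @VisibleForTesting setter added in the last hunk lets unit tests swap in a stubbed parser without any Spring context. A minimal Mockito-based sketch; the helper class is hypothetical and only setJpaStorageResourceParserForUnitTest() comes from this commit.

import static org.mockito.Mockito.mock;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;

// Hypothetical test support class (assumes Mockito on the test classpath).
public class JpaStorageResourceParserTestSupport {

	public static void stubParser(BaseHapiFhirDao<?> theDao) {
		IJpaStorageResourceParser parser = mock(IJpaStorageResourceParser.class);
		// Inject the stub directly, bypassing the @Autowired wiring used in production
		theDao.setJpaStorageResourceParserForUnitTest(parser);
	}
}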
View File
@ -57,26 +57,23 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.patch.FhirPatch;
import ca.uhn.fhir.jpa.patch.JsonPatchUtils;
import ca.uhn.fhir.jpa.patch.XmlPatchUtils;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum; import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.dstu2.resource.ListResource; import ca.uhn.fhir.model.dstu2.resource.ListResource;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.CacheControlDirective; import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum; import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.ValidationModeEnum; import ca.uhn.fhir.rest.api.ValidationModeEnum;
@ -104,9 +101,9 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ObjectUtil; import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ReflectionUtil; import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.IInstanceValidatorModule; import ca.uhn.fhir.validation.IInstanceValidatorModule;
import ca.uhn.fhir.validation.IValidationContext; import ca.uhn.fhir.validation.IValidationContext;
@ -118,7 +115,6 @@ import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType; import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -160,7 +156,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
public static final String BASE_RESOURCE_NAME = "resource"; public static final String BASE_RESOURCE_NAME = "resource";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
@Autowired @Autowired
protected PlatformTransactionManager myPlatformTransactionManager; protected PlatformTransactionManager myPlatformTransactionManager;
@Autowired(required = false) @Autowired(required = false)
@ -181,18 +176,37 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter; private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
@Autowired @Autowired
private IJobCoordinator myJobCoordinator; private IJobCoordinator myJobCoordinator;
private IInstanceValidatorModule myInstanceValidator; private IInstanceValidatorModule myInstanceValidator;
private String myResourceName; private String myResourceName;
private Class<T> myResourceType; private Class<T> myResourceType;
@Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired @Autowired
private MemoryCacheService myMemoryCacheService; private MemoryCacheService myMemoryCacheService;
private TransactionTemplate myTxTemplate; private TransactionTemplate myTxTemplate;
@Autowired @Autowired
private UrlPartitioner myUrlPartitioner; private UrlPartitioner myUrlPartitioner;
@Override
protected HapiTransactionService getTransactionService() {
return myTransactionService;
}
@VisibleForTesting
public void setTransactionService(HapiTransactionService theTransactionService) {
myTransactionService = theTransactionService;
}
@Override
protected MatchResourceUrlService getMatchResourceUrlService() {
return myMatchResourceUrlService;
}
@Override
protected IStorageResourceParser getStorageResourceParser() {
return myJpaStorageResourceParser;
}
/** /**
* @deprecated Use {@link #create(T, RequestDetails)} instead * @deprecated Use {@link #create(T, RequestDetails)} instead
*/ */
@ -219,11 +233,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return create(theResource, theIfNoneExist, true, new TransactionDetails(), theRequestDetails); return create(theResource, theIfNoneExist, true, new TransactionDetails(), theRequestDetails);
} }
@VisibleForTesting
public void setTransactionService(HapiTransactionService theTransactionService) {
myTransactionService = theTransactionService;
}
@Override @Override
public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) { public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails)); return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails));
@ -259,14 +268,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
} }
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName()); RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
return doCreateForPostOrPut(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId); return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, requestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails);
} }
/** /**
* Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails)} * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails)}
* as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails)}. * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails)}.
*/ */
private DaoMethodOutcome doCreateForPostOrPut(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { private DaoMethodOutcome doCreateForPostOrPut(RequestDetails theRequest, T theResource, String theMatchUrl, boolean theProcessMatchUrl, boolean thePerformIndexing, RequestPartitionId theRequestPartitionId, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {
StopWatch w = new StopWatch(); StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource); preProcessResourceForStorage(theResource);
@ -275,13 +284,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = new ResourceTable(); ResourceTable entity = new ResourceTable();
entity.setResourceType(toResourceName(theResource)); entity.setResourceType(toResourceName(theResource));
entity.setPartitionId(myRequestPartitionHelperService.toStoragePartition(theRequestPartitionId)); entity.setPartitionId(myRequestPartitionHelperService.toStoragePartition(theRequestPartitionId));
entity.setCreatedByMatchUrl(theIfNoneExist); entity.setCreatedByMatchUrl(theMatchUrl);
entity.setVersion(1); entity.setVersion(1);
if (isNotBlank(theIfNoneExist)) { if (isNotBlank(theMatchUrl) && theProcessMatchUrl) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theIfNoneExist, myResourceType, theTransactionDetails, theRequest); Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest);
if (match.size() > 1) { if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theIfNoneExist, match.size()); String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size());
throw new PreconditionFailedException(Msg.code(958) + msg); throw new PreconditionFailedException(Msg.code(958) + msg);
} else if (match.size() == 1) { } else if (match.size() == 1) {
ResourcePersistentId pid = match.iterator().next(); ResourcePersistentId pid = match.iterator().next();
@ -289,7 +298,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> { Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> {
return myTxTemplate.execute(tx -> { return myTxTemplate.execute(tx -> {
ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId()); ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
IBaseResource resource = toResource(foundEntity, false); IBaseResource resource = myJpaStorageResourceParser.toResource(foundEntity, false);
theResource.setId(resource.getIdElement().getValue()); theResource.setId(resource.getIdElement().getValue());
return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource); return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
}); });
@ -314,7 +323,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}); });
}; };
return toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true); DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", w.getMillisAndRestart(), UrlUtil.sanitizeUrlPart(theMatchUrl));
outcome.setOperationOutcome(createInfoOperationOutcome(msg, responseCode));
return outcome;
} }
} }
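For orientation (client side, not part of the commit): the branch above is the conditional-create path. When the If-None-Exist criteria already match an existing resource the server no-ops, and with this change the returned OperationOutcome is coded SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH. A sketch of a request that exercises it; the endpoint URL and identifier values are made up.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class ConditionalCreateExample {

	public static void main(String[] args) {
		IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir");

		Patient patient = new Patient();
		patient.addIdentifier().setSystem("http://example.org/mrn").setValue("12345");

		// POST Patient with If-None-Exist: Patient?identifier=http://example.org/mrn|12345
		MethodOutcome outcome = client.create()
			.resource(patient)
			.conditional()
			.where(Patient.IDENTIFIER.exactly().systemAndIdentifier("http://example.org/mrn", "12345"))
			.execute();

		// If the criteria matched, the server returns the existing id plus the coded OperationOutcome
		System.out.println(outcome.getId());
		System.out.println(outcome.getOperationOutcome());
	}
}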
@ -385,15 +398,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
theTransactionDetails.addResolvedResourceId(persistentId.getAssociatedResourceId(), persistentId); theTransactionDetails.addResolvedResourceId(persistentId.getAssociatedResourceId(), persistentId);
// Pre-cache the match URL // Pre-cache the match URL
if (theIfNoneExist != null) { if (theMatchUrl != null) {
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theIfNoneExist, persistentId); myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, persistentId);
} }
// Update the version/last updated in the resource so that interceptors get // Update the version/last updated in the resource so that interceptors get
// the correct version // the correct version
// TODO - the above updateEntity calls updateResourceMetadata // TODO - the above updateEntity calls updateResourceMetadata
// Maybe we don't need this call here? // Maybe we don't need this call here?
updateResourceMetadata(entity, theResource); myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
// Populate the PID in the resource so it is available to hooks // Populate the PID in the resource so it is available to hooks
addPidToResource(entity, theResource); addPidToResource(entity, theResource);
@ -409,15 +422,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams); doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams);
} }
DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, theResource).setCreated(true); DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, theResource, theMatchUrl, theOperationType)
.setCreated(true);
if (!thePerformIndexing) { if (!thePerformIndexing) {
outcome.setId(theResource.getIdElement()); outcome.setId(theResource.getIdElement());
} }
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart()); populateOperationOutcomeForUpdate(w, outcome, theMatchUrl, theOperationType);
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
ourLog.debug(msg);
return outcome; return outcome;
} }
@ -531,8 +544,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// if not found, return an outcome anyways. // if not found, return an outcome anyways.
// Because no object actually existed, we'll // Because no object actually existed, we'll
// just set the id and nothing else // just set the id and nothing else
DaoMethodOutcome outcome = createMethodOutcomeForResourceId(theId.getValue(), MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING); return createMethodOutcomeForResourceId(theId.getValue(), MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING, StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND);
return outcome;
} }
if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) { if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) {
@ -541,7 +553,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Don't delete again if it's already deleted // Don't delete again if it's already deleted
if (isDeleted(entity)) { if (isDeleted(entity)) {
DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED); DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED);
// used to exist, so we'll set the persistent id // used to exist, so we'll set the persistent id
outcome.setPersistentId(new ResourcePersistentId(entity.getResourceId())); outcome.setPersistentId(new ResourcePersistentId(entity.getResourceId()));
@ -552,7 +564,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
StopWatch w = new StopWatch(); StopWatch w = new StopWatch();
T resourceToDelete = toResource(myResourceType, entity, null, false); T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
theDeleteConflicts.setResourceIdMarkedForDeletion(theId); theDeleteConflicts.setResourceIdMarkedForDeletion(theId);
// Notify IServerOperationInterceptors about pre-action call // Notify IServerOperationInterceptors about pre-action call
@ -581,14 +593,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams); doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true); DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete, null, RestOperationTypeEnum.DELETE).setCreated(true);
IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext()); String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", 1);
String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, w.getMillis()); msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
String severity = "information"; outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE));
String code = "informational";
OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
outcome.setOperationOutcome(oo);
return outcome; return outcome;
} }
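Reviewer note on the block above: the delete outcome is now assembled via createInfoOperationOutcome() with StorageResponseCodeEnum.SUCCESSFUL_DELETE instead of hand-built OperationOutcomeUtil issues. A rough R4 helper for reading such an outcome back; the placement of the coding on issue.details and the exact code strings are assumptions here, only the enum constants appear in this diff.

import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.OperationOutcome;

// Hypothetical helper, illustration only.
public class DeleteOutcomeInspector {

	public static String describe(OperationOutcome theOutcome) {
		OperationOutcome.OperationOutcomeIssueComponent issue = theOutcome.getIssueFirstRep();
		// Assumed location of the machine-readable storage response coding
		Coding detail = issue.getDetails().getCodingFirstRep();
		// Expected values mirror StorageResponseCodeEnum, e.g. SUCCESSFUL_DELETE,
		// SUCCESSFUL_DELETE_NOT_FOUND or SUCCESSFUL_DELETE_ALREADY_DELETED
		return detail.getCode() + ": " + issue.getDiagnostics();
	}
}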
@ -669,7 +678,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid.getIdAsLong()); ResourceTable entity = myEntityManager.find(ResourceTable.class, pid.getIdAsLong());
deletedResources.add(entity); deletedResources.add(entity);
T resourceToDelete = toResource(myResourceType, entity, null, false); T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call // Notify IServerOperationInterceptors about pre-action call
HookParams hooks = new HookParams() HookParams hooks = new HookParams()
@ -703,17 +712,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
IBaseOperationOutcome oo; IBaseOperationOutcome oo;
if (deletedResources.isEmpty()) { if (deletedResources.isEmpty()) {
oo = OperationOutcomeUtil.newInstance(getContext()); String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "unableToDeleteNotFound", theUrl);
String message = getMessageSanitized("unableToDeleteNotFound", theUrl); oo = createOperationOutcome(OO_SEVERITY_WARN, msg, "not-found", StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND);
String severity = "warning";
String code = "not-found";
OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
} else { } else {
oo = OperationOutcomeUtil.newInstance(getContext()); String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", deletedResources.size());
String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", deletedResources.size(), w.getMillis()); msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
String severity = "information"; oo = createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE);
String code = "informational";
OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
} }
ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis()); ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis());
@ -745,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
} }
private <MT extends IBaseMetaType> void doMetaAdd(MT theMetaAdd, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { private <MT extends IBaseMetaType> void doMetaAdd(MT theMetaAdd, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
IBaseResource oldVersion = toResource(theEntity, false); IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false);
List<TagDefinition> tags = toTagList(theMetaAdd); List<TagDefinition> tags = toTagList(theMetaAdd);
for (TagDefinition nextDef : tags) { for (TagDefinition nextDef : tags) {
@ -778,7 +782,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
myEntityManager.merge(theEntity); myEntityManager.merge(theEntity);
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
IBaseResource newVersion = toResource(theEntity, false); IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false);
HookParams preStorageParams = new HookParams() HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion) .add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion) .add(IBaseResource.class, newVersion)
@ -802,7 +806,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private <MT extends IBaseMetaType> void doMetaDelete(MT theMetaDel, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { private <MT extends IBaseMetaType> void doMetaDelete(MT theMetaDel, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
// todo mb update hibernate search index if we are storing resources - it assumes inline tags. // todo mb update hibernate search index if we are storing resources - it assumes inline tags.
IBaseResource oldVersion = toResource(theEntity, false); IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false);
List<TagDefinition> tags = toTagList(theMetaDel); List<TagDefinition> tags = toTagList(theMetaDel);
@ -824,7 +828,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
theEntity = myEntityManager.merge(theEntity); theEntity = myEntityManager.merge(theEntity);
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
IBaseResource newVersion = toResource(theEntity, false); IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false);
HookParams preStorageParams = new HookParams() HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion) .add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion) .add(IBaseResource.class, newVersion)
@ -889,6 +893,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
} }
@Override @Override
@Nonnull
public String getResourceName() { public String getResourceName() {
return myResourceName; return myResourceName;
} }
@ -908,7 +913,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Transactional @Transactional
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) { public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch(); StopWatch w = new StopWatch();
IBundleProvider retVal = super.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset); IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset);
ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart()); ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart());
return retVal; return retVal;
} }
@ -924,7 +929,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
BaseHasResource entity = readEntity(id, theRequest); BaseHasResource entity = readEntity(id, theRequest);
IBundleProvider retVal = super.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset); IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset);
ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart()); ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart());
return retVal; return retVal;
@ -939,7 +944,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
BaseHasResource entity = readEntity(id, theRequest); BaseHasResource entity = readEntity(id, theRequest);
IBundleProvider retVal = super.history(theRequest, myResourceName, entity.getId(), IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(),
theHistorySearchDateRangeParam.getLowerBoundAsInstant(), theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
theHistorySearchDateRangeParam.getUpperBoundAsInstant(), theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
theHistorySearchDateRangeParam.getOffset(), theHistorySearchDateRangeParam.getOffset(),
@ -1096,67 +1101,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return toMetaDt(theType, tagDefinitions); return toMetaDt(theType, tagDefinitions);
} }
-@Override
-public DaoMethodOutcome patch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest) {
-TransactionDetails transactionDetails = new TransactionDetails();
-return myTransactionService.execute(theRequest, transactionDetails, tx -> doPatch(theId, theConditionalUrl, thePatchType, thePatchBody, theFhirPatchBody, theRequest, transactionDetails));
-}
-private DaoMethodOutcome doPatch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
-ResourceTable entityToUpdate;
-if (isNotBlank(theConditionalUrl)) {
-Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theConditionalUrl, myResourceType, theTransactionDetails, theRequest);
-if (match.size() > 1) {
-String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "PATCH", theConditionalUrl, match.size());
-throw new PreconditionFailedException(Msg.code(972) + msg);
-} else if (match.size() == 1) {
-ResourcePersistentId pid = match.iterator().next();
-entityToUpdate = myEntityManager.find(ResourceTable.class, pid.getId());
-} else {
-String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidMatchUrlNoMatches", theConditionalUrl);
-throw new ResourceNotFoundException(Msg.code(973) + msg);
-}
-} else {
-entityToUpdate = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
-if (theId.hasVersionIdPart()) {
-if (theId.getVersionIdPartAsLong() != entityToUpdate.getVersion()) {
-throw new ResourceVersionConflictException(Msg.code(974) + "Version " + theId.getVersionIdPart() + " is not the most recent version of this resource, unable to apply patch");
-}
-}
-}
-validateResourceType(entityToUpdate);
-if (isDeleted(entityToUpdate)) {
-throw createResourceGoneException(entityToUpdate);
-}
-IBaseResource resourceToUpdate = toResource(entityToUpdate, false);
-IBaseResource destination;
-switch (thePatchType) {
-case JSON_PATCH:
-destination = JsonPatchUtils.apply(getContext(), resourceToUpdate, thePatchBody);
-break;
-case XML_PATCH:
-destination = XmlPatchUtils.apply(getContext(), resourceToUpdate, thePatchBody);
-break;
-case FHIR_PATCH_XML:
-case FHIR_PATCH_JSON:
-default:
-IBaseParameters fhirPatchJson = theFhirPatchBody;
-new FhirPatch(getContext()).apply(resourceToUpdate, fhirPatchJson);
-destination = resourceToUpdate;
-break;
-}
-@SuppressWarnings("unchecked")
-T destinationCasted = (T) destination;
-myFhirContext.newJsonParser().setParserErrorHandler(new StrictErrorHandler()).encodeResourceToString(destinationCasted);
-return update(destinationCasted, null, true, theRequest);
-}
private boolean isDeleted(BaseHasResource entityToUpdate) {
return entityToUpdate.getDeleted() != null;
}
@@ -1205,7 +1149,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw createResourceGoneException(entity.get());
}
-T retVal = toResource(myResourceType, entity.get(), null, false);
+T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity.get(), null, false);
ourLog.debug("Processed read on {} in {}ms", thePid, w.getMillis());
return retVal;
@@ -1239,7 +1183,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
BaseHasResource entity = readEntity(theId, theRequest);
validateResourceType(entity);
-T retVal = toResource(myResourceType, entity, null, false);
+T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
if (theDeletedOk == false) {
if (isDeleted(entity)) {
@@ -1293,7 +1237,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = entityOpt.get();
try {
-T resource = (T) toResource(entity, false);
+T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
reindex(resource, entity);
} catch (BaseServerResponseException | DataFormatException e) {
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
@@ -1375,6 +1319,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return entity;
}
+@Override
+protected IBasePersistedResource readEntityLatestVersion(ResourcePersistentId thePersistentId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
+return myEntityManager.find(ResourceTable.class, thePersistentId.getIdAsLong());
+}
+@Override
@Nonnull
protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, getResourceName(), theId);
@@ -1691,8 +1642,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
-StopWatch w = new StopWatch();
T resource = theResource;
preProcessResourceForStorage(resource);
@@ -1701,6 +1650,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = null;
IIdType resourceId;
+RestOperationTypeEnum update = RestOperationTypeEnum.UPDATE;
if (isNotBlank(theMatchUrl)) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest, theResource);
if (match.size() > 1) {
@@ -1711,7 +1661,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
entity = myEntityManager.find(ResourceTable.class, pid.getId());
resourceId = entity.getIdDt();
} else {
-DaoMethodOutcome outcome = create(resource, null, thePerformIndexing, theTransactionDetails, theRequest);
+RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
+DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
// Pre-cache the match URL
if (outcome.getPersistentId() != null) {
@@ -1750,85 +1701,16 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
if (create) {
-return doCreateForPostOrPut(resource, null, thePerformIndexing, theTransactionDetails, theRequest, requestPartitionId);
+return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
}
}
-if (resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) != entity.getVersion()) {
-throw new ResourceVersionConflictException(Msg.code(989) + "Trying to update " + resourceId + " but this is not the current version");
+// Start
+return doUpdateForUpdateOrPatch(theRequest, resourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, resource, entity, update, theTransactionDetails);
}
-if (resourceId.hasResourceType() && !resourceId.getResourceType().equals(getResourceName())) {
-throw new UnprocessableEntityException(Msg.code(990) + "Invalid resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] of type[" + entity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]");
-}
-IBaseResource oldResource;
-if (getConfig().isMassIngestionMode()) {
-oldResource = null;
-} else {
-oldResource = toResource(entity, false);
-}
-/*
-* Mark the entity as not deleted - This is also done in the actual updateInternal()
-* method later on so it usually doesn't matter whether we do it here, but in the
-* case of a transaction with multiple PUTs we don't get there until later so
-* having this here means that a transaction can have a reference in one
-* resource to another resource in the same transaction that is being
-* un-deleted by the transaction. Wacky use case, sure. But it's real.
-*
-* See SystemProviderR4Test#testTransactionReSavesPreviouslyDeletedResources
-* for a test that needs this.
-*/
-boolean wasDeleted = isDeleted(entity);
-entity.setDeleted(null);
-/*
-* If we aren't indexing, that means we're doing this inside a transaction.
-* The transaction will do the actual storage to the database a bit later on,
-* after placeholder IDs have been replaced, by calling {@link #updateInternal}
-* directly. So we just bail now.
-*/
-if (!thePerformIndexing) {
-resource.setId(entity.getIdDt().getValue());
-DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, resource).setCreated(wasDeleted);
-outcome.setPreviousResource(oldResource);
-if (!outcome.isNop()) {
-// Technically this may not end up being right since we might not increment if the
-// contents turn out to be the same
-outcome.setId(outcome.getId().withVersion(Long.toString(outcome.getId().getVersionIdPartAsLong() + 1)));
-}
-return outcome;
-}
-/*
-* Otherwise, we're not in a transaction
-*/
-ResourceTable savedEntity = updateInternal(theRequest, resource, thePerformIndexing, theForceUpdateVersion, entity, resourceId, oldResource, theTransactionDetails);
-if (thePerformIndexing) {
-Collection<? extends BaseTag> tagList = Collections.emptyList();
-if (entity.isHasTags()) {
-tagList = entity.getTags();
-}
-long version = entity.getVersion();
-populateResourceMetadata(entity, false, tagList, version, getResourceType(), resource);
-}
-DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, resource).setCreated(wasDeleted);
-if (!thePerformIndexing) {
-IIdType id = getContext().getVersion().newIdType();
-id.setValue(entity.getIdDt().getValue());
-outcome.setId(id);
-}
-String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
-outcome.setOperationOutcome(createInfoOperationOutcome(msg));
-ourLog.debug(msg);
-return outcome;
-}
/**
* Method for updating the historical version of the resource when a history version id is included in the request.
@@ -1844,8 +1726,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// No need for indexing as this will update a non-current version of the resource which will not be searchable
preProcessResourceForStorage(theResource, theRequest, theTransactionDetails, false);
-BaseHasResource entity = null;
-BaseHasResource currentEntity = null;
+BaseHasResource entity;
+BaseHasResource currentEntity;
IIdType resourceId;
@@ -1874,12 +1756,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
entity.setDeleted(null);
boolean isUpdatingCurrent = resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) == currentEntity.getVersion();
IBasePersistedResource savedEntity = updateHistoryEntity(theRequest, theResource, currentEntity, entity, resourceId, theTransactionDetails, isUpdatingCurrent);
-DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource).setCreated(wasDeleted);
-String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
-outcome.setOperationOutcome(createInfoOperationOutcome(msg));
-ourLog.debug(msg);
+DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource, null, RestOperationTypeEnum.UPDATE).setCreated(wasDeleted);
+populateOperationOutcomeForUpdate(w, outcome, null, RestOperationTypeEnum.UPDATE);
return outcome;
}
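For orientation, the delete path changed earlier in this file's diff now attaches a coded issue drawn from the new StorageResponseCodeEnum code system to the returned OperationOutcome, alongside the human-readable message. A minimal caller-side sketch (not part of this commit; assumes an R4 server, an injected Patient DAO and request details, and that the coding is surfaced in issue.details as the rest of this change set describes):

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Patient;

public class DeleteOutcomeExample {

   // Deletes Patient/123 and returns "system|code" of the coded storage response.
   public static String deleteAndDescribe(IFhirResourceDao<Patient> thePatientDao, RequestDetails theRequestDetails) {
      DaoMethodOutcome outcome = thePatientDao.delete(new IdType("Patient/123"), theRequestDetails);

      // The human-readable message typically lands in issue.diagnostics; the new
      // machine-readable StorageResponseCodeEnum coding is carried in issue.details.coding.
      OperationOutcome oo = (OperationOutcome) outcome.getOperationOutcome();
      Coding detail = oo.getIssueFirstRep().getDetails().getCodingFirstRep();
      return detail.getSystem() + "|" + detail.getCode();
   }
}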
View File
@@ -1,12 +1,20 @@
package ca.uhn.fhir.jpa.dao;
+import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
+import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
+import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
@@ -25,6 +33,9 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
@@ -58,25 +69,36 @@ import java.util.stream.Collectors;
* #L%
*/
-public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> {
+public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseStorageDao implements IFhirSystemDao<T, MT> {
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
public ResourceCountCache myResourceCountsCache;
+@PersistenceContext(type = PersistenceContextType.TRANSACTION)
+protected EntityManager myEntityManager;
@Autowired
private TransactionProcessor myTransactionProcessor;
@Autowired
private ApplicationContext myApplicationContext;
+@Autowired
+private ExpungeService myExpungeService;
+@Autowired
+private IResourceTableDao myResourceTableDao;
+@Autowired
+private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
+@Autowired
+private IResourceTagDao myResourceTagDao;
+@Autowired
+private IInterceptorBroadcaster myInterceptorBroadcaster;
@VisibleForTesting
public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
myTransactionProcessor = theTransactionProcessor;
}
+@Override
@PostConstruct
public void start() {
+super.start();
myTransactionProcessor.setDao(this);
}
@@ -124,7 +146,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
@Override
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
-IBundleProvider retVal = super.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
+IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
ourLog.info("Processed global history in {}ms", w.getMillisAndRestart());
return retVal;
}
@@ -259,4 +281,25 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
return null;
}
+@Override
+protected IInterceptorBroadcaster getInterceptorBroadcaster() {
+return myInterceptorBroadcaster;
+}
+@Override
+protected DaoConfig getConfig() {
+return myDaoConfig;
+}
+@Override
+public FhirContext getContext() {
+return myFhirContext;
+}
+@VisibleForTesting
+public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
+myDaoConfig = theDaoConfig;
+}
}
View File
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
View File
@ -0,0 +1,52 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.annotation.Nullable;
import java.util.Collection;
public interface IJpaStorageResourceParser extends IStorageResourceParser {
/**
* Convert a storage entity into a FHIR resource model instance
*/
<R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
/**
* Populate the metadata (Resource.meta.*) from a storage entity and other related
* objects pulled from the database
*/
<R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, R theResourceTarget);
/**
* Populates a resource model object's metadata (Resource.meta.*) based on the
* values from a storage entity.
*
* @param theEntitySource The source
* @param theResourceTarget The target
*/
void updateResourceMetadata(IBaseResourceEntity theEntitySource, IBaseResource theResourceTarget);
}
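For context, a short usage sketch of the new interface (not part of this commit; assumes an injected bean and an already-loaded entity, and the surrounding class and method names are illustrative). It shows both the generic conversion inherited from IStorageResourceParser and the typed overload declared above:

import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;
import org.springframework.beans.factory.annotation.Autowired;

public class StorageParserUsageExample {

   @Autowired
   private IJpaStorageResourceParser myJpaStorageResourceParser;

   // Generic form: the runtime resource type is resolved from the entity itself
   public IBaseResource load(ResourceTable theEntity) {
      return myJpaStorageResourceParser.toResource(theEntity, false);
   }

   // Typed form: ask for a specific resource class; no pre-fetched tag list, not a history operation
   public Patient loadPatient(ResourceTable theEntity) {
      return myJpaStorageResourceParser.toResource(Patient.class, theEntity, null, false);
   }
}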
View File
@ -0,0 +1,490 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.MetaUtil;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.cleanProvenanceSourceUri;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaStorageResourceParser implements IJpaStorageResourceParser {
public static final LenientErrorHandler LENIENT_ERROR_HANDLER = new LenientErrorHandler(false).setErrorOnInvalidValue(false);
private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageResourceParser.class);
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private IPartitionLookupSvc myPartitionLookupSvc;
@Override
public IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
Class<? extends IBaseResource> resourceType = type.getImplementingClass();
return toResource(resourceType, (IBaseResourceEntity) theEntity, null, theForHistoryOperation);
}
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
// 1. Get the resource, its encoding, and the tags if any
byte[] resourceBytes;
String resourceText;
ResourceEncodingEnum resourceEncoding;
@Nullable
Collection<? extends BaseTag> tagList = Collections.emptyList();
long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;
if (theEntity instanceof ResourceHistoryTable) {
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
switch (myDaoConfig.getTagStorageMode()) {
case VERSIONED:
default:
if (history.isHasTags()) {
tagList = history.getTags();
}
break;
case NON_VERSIONED:
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
break;
case INLINE:
tagList = null;
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
ResourceHistoryTable history;
if (resource.getCurrentVersionEntity() != null) {
history = resource.getCurrentVersionEntity();
} else {
version = theEntity.getVersion();
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
((ResourceTable) theEntity).setCurrentVersionEntity(history);
while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
} else {
return null;
}
}
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
resourceText = history.getResourceTextVc();
switch (myDaoConfig.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
}
break;
case INLINE:
tagList = null;
break;
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView view = (ResourceSearchView) theEntity;
resourceBytes = view.getResource();
resourceText = view.getResourceTextVc();
resourceEncoding = view.getEncoding();
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
switch (myDaoConfig.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
tagList = theTagList;
}
break;
case INLINE:
tagList = null;
break;
}
} else {
// something wrong
return null;
}
// 2. Get the text
String decodedResourceText = decodedResourceText(resourceBytes, resourceText, resourceEncoding);
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = determineTypeToParse(theResourceType, tagList);
// 4. parse the text to FHIR
R retVal = parseResource(theEntity, resourceEncoding, decodedResourceText, resourceType);
// 5. fill MetaData
retVal = populateResourceMetadata(theEntity, theForHistoryOperation, tagList, version, retVal);
// 6. Handle source (provenance)
populateResourceSource(provenanceSourceUri, provenanceRequestId, retVal);
// 7. Add partition information
populateResourcePartitionInformation(theEntity, retVal);
return retVal;
}
private <R extends IBaseResource> void populateResourcePartitionInformation(IBaseResourceEntity theEntity, R retVal) {
if (myPartitionSettings.isPartitioningEnabled()) {
PartitionablePartitionId partitionId = theEntity.getPartitionId();
if (partitionId != null && partitionId.getPartitionId() != null) {
PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId());
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId());
} else {
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null);
}
}
}
private <R extends IBaseResource> void populateResourceSource(String provenanceSourceUri, String provenanceRequestId, R retVal) {
if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
+ (isNotBlank(provenanceRequestId) ? "#" : "")
+ defaultString(provenanceRequestId);
MetaUtil.setSource(myContext, retVal, sourceString);
}
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum resourceEncoding, String decodedResourceText, Class<R> resourceType) {
R retVal;
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());
try {
retVal = parser.parseResource(resourceType, decodedResourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(myContext.getResourceType(resourceType));
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(Msg.code(928) + msg, e);
}
} else {
retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
}
return retVal;
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> Class<R> determineTypeToParse(Class<R> theResourceType, @Nullable Collection<? extends BaseTag> tagList) {
Class<R> resourceType = theResourceType;
if (tagList != null) {
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
if (newType != null && theResourceType.isAssignableFrom(newType)) {
ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
resourceType = (Class<R>) newType;
break;
}
}
}
}
}
}
return resourceType;
}
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, R theResourceTarget) {
if (theResourceTarget instanceof IResource) {
IResource res = (IResource) theResourceTarget;
theResourceTarget = (R) populateResourceMetadataHapi(theEntitySource, tagList, theForHistoryOperation, res, theVersion);
} else {
IAnyResource res = (IAnyResource) theResourceTarget;
theResourceTarget = populateResourceMetadataRi(theEntitySource, tagList, theForHistoryOperation, res, theVersion);
}
return theResourceTarget;
}
@SuppressWarnings("unchecked")
private <R extends IResource> R populateResourceMetadataHapi(IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, R res, Long theVersion) {
R retVal = res;
if (theEntity.getDeleted() != null) {
res = (R) myContext.getResourceDefinition(res).newInstance();
retVal = res;
ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
if (theForHistoryOperation) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
}
} else if (theForHistoryOperation) {
/*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
*/
Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue();
if (published.equals(updated)) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.POST);
} else {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.PUT);
}
}
res.setId(theEntity.getIdDt().withVersion(theVersion.toString()));
ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
ResourceMetadataKeyEnum.PUBLISHED.put(res, theEntity.getPublished());
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
if (theTagList != null) {
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
List<IdDt> profiles = new ArrayList<>();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
profiles.add(new IdDt(next.getTag().getCode()));
break;
case SECURITY_LABEL:
IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
secLabel.setSystem(next.getTag().getSystem());
secLabel.setCode(next.getTag().getCode());
secLabel.setDisplay(next.getTag().getDisplay());
securityLabels.add(secLabel);
break;
case TAG:
tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
break;
}
}
if (tagList.size() > 0) {
ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
}
if (securityLabels.size() > 0) {
ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
}
if (profiles.size() > 0) {
ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
}
}
}
return retVal;
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(res).newInstance();
retVal = (R) res;
ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
if (theForHistoryOperation) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.DELETE.toCode());
}
} else if (theForHistoryOperation) {
/*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
*/
Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue();
if (published.equals(updated)) {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.POST.toCode());
} else {
ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.PUT.toCode());
}
}
res.getMeta().setLastUpdated(null);
res.getMeta().setVersionId(null);
updateResourceMetadata(theEntity, res);
res.setId(res.getIdElement().withVersion(theVersion.toString()));
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
if (theTagList != null) {
res.getMeta().getTag().clear();
res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
}
}
}
return retVal;
}
@Override
public void updateResourceMetadata(IBaseResourceEntity theEntitySource, IBaseResource theResourceTarget) {
IIdType id = theEntitySource.getIdDt();
if (myContext.getVersion().getVersion().isRi()) {
id = myContext.getVersion().newIdType().setValue(id.getValue());
}
if (id.hasResourceType() == false) {
id = id.withResourceType(theEntitySource.getResourceType());
}
theResourceTarget.setId(id);
if (theResourceTarget instanceof IResource) {
ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart());
ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated());
} else {
IBaseMetaType meta = theResourceTarget.getMeta();
meta.setVersionId(id.getVersionIdPart());
meta.setLastUpdated(theEntitySource.getUpdatedDate());
}
}
private FhirContext getContext(FhirVersionEnum theVersion) {
Validate.notNull(theVersion, "theVersion must not be null");
if (theVersion == myContext.getVersion().getVersion()) {
return myContext;
}
return FhirContext.forCached(theVersion);
}
private static String decodedResourceText(byte[] resourceBytes, String resourceText, ResourceEncodingEnum resourceEncoding) {
String decodedResourceText;
if (resourceText != null) {
decodedResourceText = resourceText;
} else {
decodedResourceText = decodeResource(resourceBytes, resourceEncoding);
}
return decodedResourceText;
}
private static List<BaseCodingDt> toBaseCodingList(List<IBaseCoding> theSecurityLabels) {
ArrayList<BaseCodingDt> retVal = new ArrayList<>(theSecurityLabels.size());
for (IBaseCoding next : theSecurityLabels) {
retVal.add((BaseCodingDt) next);
}
return retVal;
}
}
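One behavior of populateResourceSource() above worth noting: the provenance source URI and the request ID are collapsed into a single Resource.meta.source value joined by "#". A small illustrative sketch (not part of this commit; the values are invented) of the resulting format:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.MetaUtil;
import org.hl7.fhir.r4.model.Patient;

public class SourceFormatExample {

   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4Cached();
      Patient patient = new Patient();

      // Mirrors the concatenation done in populateResourceSource():
      // cleanProvenanceSourceUri(sourceUri) + "#" + requestId
      MetaUtil.setSource(ctx, patient, "urn:source:my-upstream-system#9f2a7c11");

      // Prints "urn:source:my-upstream-system#9f2a7c11"
      System.out.println(patient.getMeta().getSource());
   }
}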
View File
@@ -25,9 +25,10 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
@@ -41,19 +42,18 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -123,11 +123,13 @@ public class ResourceExpungeService implements IResourceExpungeService {
private DaoConfig myDaoConfig;
@Autowired
private MemoryCacheService myMemoryCacheService;
+@Autowired
+private IJpaStorageResourceParser myJpaStorageResourceParser;
@Override
@Transactional
public List<ResourcePersistentId> findHistoricalVersionsOfNonDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
-if(isEmptyQuery(theRemainingCount)){
+if (isEmptyQuery(theRemainingCount)) {
return Collections.EMPTY_LIST;
}
@@ -154,7 +156,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
@Override
@Transactional
public List<ResourcePersistentId> findHistoricalVersionsOfDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
-if(isEmptyQuery(theRemainingCount)){
+if (isEmptyQuery(theRemainingCount)) {
return Collections.EMPTY_LIST;
}
@@ -192,7 +194,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
* be optimized, but expunge is hopefully not frequently called on busy servers
* so it shouldn't be too big a deal.
*/
-TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization(){
+TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
@Override
public void afterCommit() {
myMemoryCacheService.invalidateAllCaches();
@@ -220,8 +222,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
private void callHooks(RequestDetails theRequestDetails, AtomicInteger theRemainingCount, ResourceHistoryTable theVersion, IdDt theId) {
final AtomicInteger counter = new AtomicInteger();
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, myInterceptorBroadcaster, theRequestDetails)) {
-IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(theId.getResourceType());
-IBaseResource resource = resourceDao.toResource(theVersion, false);
+IBaseResource resource = myJpaStorageResourceParser.toResource(theVersion, false);
HookParams params = new HookParams()
.add(AtomicInteger.class, counter)
.add(IIdType.class, theId)
@@ -324,7 +325,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, Long myResourceId, AtomicInteger theRemainingCount) {
Pageable page;
-synchronized (theRemainingCount){
+synchronized (theRemainingCount) {
if (expungeLimitReached(theRemainingCount)) {
return;
}
@@ -348,7 +349,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
return new SliceImpl<>(Collections.singletonList(myVersion.getId()));
}
-private boolean isEmptyQuery(int theCount){
+private boolean isEmptyQuery(int theCount) {
return theCount <= 0;
}
View File
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@@ -41,7 +42,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc {
private final DeleteExpungeSqlBuilder myDeleteExpungeSqlBuilder;
private final IFulltextSearchSvc myFullTextSearchSvc;
-public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, IFulltextSearchSvc theFullTextSearchSvc) {
+public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) {
myEntityManager = theEntityManager;
myDeleteExpungeSqlBuilder = theDeleteExpungeSqlBuilder;
myFullTextSearchSvc = theFullTextSearchSvc;
View File
@@ -31,7 +31,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
@@ -104,13 +106,15 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private DaoConfig myDaoConfig;
+@Autowired
+private MemoryCacheService myMemoryCacheService;
+@Autowired
+private IJpaStorageResourceParser myJpaStorageResourceParser;
/*
* Non autowired fields (will be different for every instance
* of this class, since it's a prototype
*/
-@Autowired
-private MemoryCacheService myMemoryCacheService;
private Search mySearchEntity;
private String myUuid;
private SearchCacheStatusEnum myCacheStatus;
@@ -162,7 +166,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
resource = next;
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(next.getResourceType());
-retVal.add(dao.toResource(resource, true));
+retVal.add(myJpaStorageResourceParser.toResource(resource, true));
}
View File
@ -23,11 +23,20 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.jpa.config.JpaConfig; import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask; import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import java.util.Date;
import java.util.UUID;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
public class PersistedJpaBundleProviderFactory { public class PersistedJpaBundleProviderFactory {
@Autowired @Autowired
@ -46,4 +55,28 @@ public class PersistedJpaBundleProviderFactory {
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) { public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder); return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder);
} }
public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
}
public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
String resourceName = defaultIfBlank(theResourceType, null);
Search search = new Search();
search.setOffset(theOffset);
search.setDeleted(false);
search.setCreated(new Date());
search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);
search.setResourceId(theResourcePid);
search.setSearchType(SearchTypeEnum.HISTORY);
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);
return newInstance(theRequest, search);
}
} }
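A brief usage sketch for the new history() overload above; the caller, field, and variable names here are hypothetical (not part of this change), and passing null for the HistorySearchStyleEnum simply behaves like the shorter overload:
@Autowired
private PersistedJpaBundleProviderFactory myBundleProviderFactory;

public IBundleProvider fetchPatientHistory(RequestDetails theRequestDetails, Long thePatientPid) {
   // No date bounds, no paging offset, and no special history search style.
   return myBundleProviderFactory.history(
      theRequestDetails, "Patient", thePatientPid, null, null, null, null);
}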

View File

@ -39,6 +39,7 @@ import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao; import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
@ -144,20 +145,34 @@ public class SearchBuilder implements ISearchBuilder {
@Deprecated @Deprecated
public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50; public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
public static final String RESOURCE_ID_ALIAS = "resource_id";
public static final String RESOURCE_VERSION_ALIAS = "resource_version";
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
private static final ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L); private static final ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L);
private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
public static final String RESOURCE_ID_ALIAS = "resource_id";
public static final String RESOURCE_VERSION_ALIAS = "resource_version";
public static boolean myUseMaxPageSize50ForTest = false; public static boolean myUseMaxPageSize50ForTest = false;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
private final String myResourceName; private final String myResourceName;
private final Class<? extends IBaseResource> myResourceType; private final Class<? extends IBaseResource> myResourceType;
private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
private final SqlObjectFactory mySqlBuilderFactory;
private final HibernatePropertiesProvider myDialectProvider;
private final ModelConfig myModelConfig;
private final ISearchParamRegistry mySearchParamRegistry;
private final PartitionSettings myPartitionSettings;
private final DaoRegistry myDaoRegistry;
private final IResourceSearchViewDao myResourceSearchViewDao;
private final FhirContext myContext;
private final IIdHelperService myIdHelperService;
private final DaoConfig myDaoConfig;
private final IDao myCallingDao;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
private List<ResourcePersistentId> myAlsoIncludePids; private List<ResourcePersistentId> myAlsoIncludePids;
private CriteriaBuilder myCriteriaBuilder; private CriteriaBuilder myCriteriaBuilder;
private SearchParameterMap myParams; private SearchParameterMap myParams;
@ -167,30 +182,12 @@ public class SearchBuilder implements ISearchBuilder {
private Set<ResourcePersistentId> myPidSet; private Set<ResourcePersistentId> myPidSet;
private boolean myHasNextIteratorQuery = false; private boolean myHasNextIteratorQuery = false;
private RequestPartitionId myRequestPartitionId; private RequestPartitionId myRequestPartitionId;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired(required = false) @Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc; private IFulltextSearchSvc myFulltextSearchSvc;
@Autowired(required = false) @Autowired(required = false)
private IElasticsearchSvc myIElasticsearchSvc; private IElasticsearchSvc myIElasticsearchSvc;
@Autowired
private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; private IJpaStorageResourceParser myJpaStorageResourceParser;
private final SqlObjectFactory mySqlBuilderFactory;
private final HibernatePropertiesProvider myDialectProvider;
private final ModelConfig myModelConfig;
private final ISearchParamRegistry mySearchParamRegistry;
private final PartitionSettings myPartitionSettings;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
private final DaoRegistry myDaoRegistry;
private final IResourceSearchViewDao myResourceSearchViewDao;
private final FhirContext myContext;
private final IIdHelperService myIdHelperService;
private final DaoConfig myDaoConfig;
private final IDao myCallingDao;
/** /**
* Constructor * Constructor
@ -893,7 +890,7 @@ public class SearchBuilder implements ISearchBuilder {
IBaseResource resource = null; IBaseResource resource = null;
if (next != null) { if (next != null) {
resource = myCallingDao.toResource(resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); resource = myJpaStorageResourceParser.toResource(resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
} }
if (resource == null) { if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion()); ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());

View File

@ -38,6 +38,9 @@ import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider; import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider; import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IStorageResourceParser;
import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@ -264,6 +267,8 @@ public class TermReadSvcImpl implements ITermReadSvc {
private CachingValidationSupport myCachingValidationSupport; private CachingValidationSupport myCachingValidationSupport;
@Autowired @Autowired
private VersionCanonicalizer myVersionCanonicalizer; private VersionCanonicalizer myVersionCanonicalizer;
@Autowired
private IJpaStorageResourceParser myJpaStorageResourceParser;
@Override @Override
public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) {
@ -2434,7 +2439,7 @@ public class TermReadSvcImpl implements ITermReadSvc {
+ ForcedId.IDX_FORCEDID_TYPE_FID + " removed?"); + ForcedId.IDX_FORCEDID_TYPE_FID + " removed?");
IFhirResourceDao<CodeSystem> csDao = myDaoRegistry.getResourceDao("CodeSystem"); IFhirResourceDao<CodeSystem> csDao = myDaoRegistry.getResourceDao("CodeSystem");
IBaseResource cs = csDao.toResource(resultList.get(0), false); IBaseResource cs = myJpaStorageResourceParser.toResource(resultList.get(0), false);
return Optional.of(cs); return Optional.of(cs);
} }
@ -2523,7 +2528,7 @@ public class TermReadSvcImpl implements ITermReadSvc {
private org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) { private org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) {
Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition("ValueSet").getImplementingClass(); Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition("ValueSet").getImplementingClass();
IBaseResource valueSet = myDaoRegistry.getResourceDao("ValueSet").toResource(type, theResourceTable, null, false); IBaseResource valueSet = myJpaStorageResourceParser.toResource(type, theResourceTable, null, false);
return myVersionCanonicalizer.valueSetToCanonical(valueSet); return myVersionCanonicalizer.valueSetToCanonical(valueSet);
} }

View File

@ -7,7 +7,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -20,14 +20,15 @@ package ca.uhn.fhir.jpa.model.cross;
* #L% * #L%
*/ */
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Date;
public interface IBasePersistedResource extends IResourceLookup { public interface IBasePersistedResource extends IResourceLookup {
IIdType getIdDt(); IIdType getIdDt();
long getVersion();
boolean isDeleted();
void setNotDeleted();
} }
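The new isDeleted()/setNotDeleted() contract lets version-agnostic storage code probe and clear the deletion marker without caring which entity class it holds; a minimal sketch (the helper name is illustrative, not part of HAPI):
// Illustrative helper only: both ResourceTable and ResourceHistoryTable satisfy this
// contract by delegating to getDeleted()/setDeleted(null), as shown in the implementations below.
void reviveIfDeleted(IBasePersistedResource theEntity) {
   if (theEntity.isDeleted()) {
      theEntity.setNotDeleted();
   }
}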

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.ParserOptions; import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.dstu2.model.Subscription;
@ -79,6 +80,30 @@ public class ModelConfig {
private boolean myDefaultSearchParamsCanBeOverridden = true; private boolean myDefaultSearchParamsCanBeOverridden = true;
private Set<Subscription.SubscriptionChannelType> mySupportedSubscriptionTypes = new HashSet<>(); private Set<Subscription.SubscriptionChannelType> mySupportedSubscriptionTypes = new HashSet<>();
private boolean myCrossPartitionSubscription = false; private boolean myCrossPartitionSubscription = false;
/**
* If set to <code>true</code>, the server will attempt to map terminology for
* bulk export jobs using the logic in {@link ResponseTerminologyTranslationSvc}.
* Default is <code>false</code>.
*
* @since 6.3.0
*/
public boolean isNormalizeTerminologyForBulkExportJobs() {
return myNormalizeTerminologyForBulkExportJobs;
}
/**
* If set to <code>true</code>, the server will attempt to map terminology for
* bulk export jobs using the logic in {@link ResponseTerminologyTranslationSvc}.
* Default is <code>false</code>.
*
* @since 6.3.0
*/
public void setNormalizeTerminologyForBulkExportJobs(boolean theNormalizeTerminologyForBulkExportJobs) {
myNormalizeTerminologyForBulkExportJobs = theNormalizeTerminologyForBulkExportJobs;
}
private boolean myNormalizeTerminologyForBulkExportJobs = false;
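For reference, enabling the new flag outside a test is just a matter of flipping it on the ModelConfig bean; a minimal sketch, assuming ModelConfig is available for injection the same way the myModelConfig field is in the test fixtures later in this change:
@Autowired
private ModelConfig myModelConfig;

public void enableBulkExportTerminologyMapping() {
   // Opt in to terminology mapping for bulk export jobs (the default is false).
   myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
}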
private String myEmailFromAddress = "noreply@unknown.com"; private String myEmailFromAddress = "noreply@unknown.com";
private String myWebsocketContextPath = DEFAULT_WEBSOCKET_CONTEXT_PATH; private String myWebsocketContextPath = DEFAULT_WEBSOCKET_CONTEXT_PATH;
/** /**

View File

@ -212,6 +212,16 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable
return myResourceVersion; return myResourceVersion;
} }
@Override
public boolean isDeleted() {
return getDeleted() != null;
}
@Override
public void setNotDeleted() {
setDeleted(null);
}
public void setVersion(long theVersion) { public void setVersion(long theVersion) {
myResourceVersion = theVersion; myResourceVersion = theVersion;
} }

View File

@ -548,6 +548,16 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
return myVersion; return myVersion;
} }
@Override
public boolean isDeleted() {
return getDeleted() != null;
}
@Override
public void setNotDeleted() {
setDeleted(null);
}
public void setVersion(long theVersion) { public void setVersion(long theVersion) {
myVersion = theVersion; myVersion = theVersion;
} }

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -48,14 +48,16 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
CodeSystem codeSystem = new CodeSystem(); CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo"); codeSystem.setUrl("http://foo");
IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless(); IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualifiedVersionless();
myCodeSystemDao.delete(id); myCodeSystemDao.delete(id, mySrd);
codeSystem = new CodeSystem(); codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo"); codeSystem.setUrl("http://foo");
myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless(); IIdType id2 = myCodeSystemDao.update(codeSystem, "CodeSystem?url=http://foo", mySrd).getId();
assertNotEquals(id.getIdPart(), id2.getIdPart());
assertEquals("1", id2.getVersionIdPart());
} }
@Test @Test

View File

@ -1117,7 +1117,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String encoded = myFhirContext.newXmlParser().encodeResourceToString(response.getOperationOutcome()); String encoded = myFhirContext.newXmlParser().encodeResourceToString(response.getOperationOutcome());
ourLog.info(encoded); ourLog.info(encoded);
assertThat(encoded, containsString( assertThat(encoded, containsString(
"<issue><severity value=\"information\"/><code value=\"informational\"/><diagnostics value=\"Successfully deleted 2 resource(s) in ")); "<diagnostics value=\"Successfully deleted 2 resource(s)"));
try { try {
myClient.read().resource("Patient").withId(id1).execute(); myClient.read().resource("Patient").withId(id1).execute();
fail(); fail();
@ -1144,7 +1144,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
ourLog.info(response); ourLog.info(response);
assertEquals(200, resp.getStatusLine().getStatusCode()); assertEquals(200, resp.getStatusLine().getStatusCode());
assertThat(response, containsString( assertThat(response, containsString(
"<issue><severity value=\"warning\"/><code value=\"not-found\"/><diagnostics value=\"Unable to find resource matching URL &quot;Patient?identifier=testDeleteConditionalNoMatches&quot;. Deletion failed.\"/></issue>")); "<diagnostics value=\"Unable to find resource matching URL &quot;Patient?identifier=testDeleteConditionalNoMatches&quot;. Nothing has been deleted.\"/>"));
} finally { } finally {
IOUtils.closeQuietly(resp); IOUtils.closeQuietly(resp);
} }
@ -1214,7 +1214,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp); ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp); OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in ")); assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took "));
} finally { } finally {
response.close(); response.close();
} }
@ -1241,7 +1241,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp); ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp); OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Deletion failed.")); assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Nothing has been deleted."));
} finally { } finally {
response.close(); response.close();
} }
@ -1322,7 +1322,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
MethodOutcome resp = myClient.delete().resourceById(id).execute(); MethodOutcome resp = myClient.delete().resourceById(id).execute();
OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome(); OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in ")); assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took"));
} }
/** /**

View File

@ -192,14 +192,11 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
@Test @Test
public void testPatchUsingJsonPatch_Transaction() throws Exception { public void testPatchUsingJsonPatch_Transaction() {
String methodName = "testPatchUsingJsonPatch_Transaction";
IIdType pid1; IIdType pid1;
{ {
Patient patient = new Patient(); Patient patient = new Patient();
patient.setActive(true); patient.setActive(true);
patient.addIdentifier().setSystem("urn:system").setValue("0");
patient.addName().setFamily(methodName).addGiven("Joe");
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
} }
@ -224,6 +221,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
.getRequest().setUrl(pid1.getValue()).setMethod(HTTPVerb.PUT); .getRequest().setUrl(pid1.getValue()).setMethod(HTTPVerb.PUT);
Bundle bundle = ourClient.transaction().withBundle(input).execute(); Bundle bundle = ourClient.transaction().withBundle(input).execute();
ourLog.info("Response: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
//Validate over all bundle response entry contents. //Validate over all bundle response entry contents.
assertThat(bundle.getType(), is(equalTo(Bundle.BundleType.TRANSACTIONRESPONSE))); assertThat(bundle.getType(), is(equalTo(Bundle.BundleType.TRANSACTIONRESPONSE)));

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -64,11 +64,11 @@ import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class) @ExtendWith(MockitoExtension.class)
public class BaseHapiFhirDaoTest { public class BaseHapiFhirDaoTest {
private static class TestDao extends BaseHapiFhirDao<Patient> { private static class TestDao extends BaseHapiFhirResourceDao<Patient> {
@Nullable @Nullable
@Override @Override
protected String getResourceName() { public String getResourceName() {
return "Patient"; return "Patient";
} }

View File

@ -171,7 +171,7 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
p = new Patient(); p = new Patient();
p.addName().setFamily("PATIENT3"); p.addName().setFamily("PATIENT3");
id2 = myPatientDao.update(p, "Patient?family=ZZZ", mySrd).getId().getIdPartAsLong(); id2 = myPatientDao.update(p, "Patient?family=PATIENT3", mySrd).getId().getIdPartAsLong();
assertNotEquals(id, id2); assertNotEquals(id, id2);
detailsCapt = ArgumentCaptor.forClass(RequestDetails.class); detailsCapt = ArgumentCaptor.forClass(RequestDetails.class);

View File

@ -4,8 +4,10 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.gclient.TokenClientParam; import ca.uhn.fhir.rest.gclient.TokenClientParam;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.Enumerations;
@ -24,6 +26,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
@SuppressWarnings({"Duplicates"}) @SuppressWarnings({"Duplicates"})
public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test { public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
@ -111,6 +115,119 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
patient = (Patient) myPatientDao.update(patient, mySrd).getResource(); patient = (Patient) myPatientDao.update(patient, mySrd).getResource();
myCaptureQueriesListener.logAllQueries(); myCaptureQueriesListener.logAllQueries();
runInTransaction(() -> assertEquals(3, myResourceTagDao.count())); runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
// Read it back
patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
}
/**
* Make sure tags are preserved
*/
@Test
public void testDeleteResourceWithTags_NonVersionedTags_InTransaction() {
initializeNonVersioned();
when(mySrd.getHeader(eq(Constants.HEADER_PREFER))).thenReturn("return=representation");
Bundle input, output;
// Delete
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
input = new BundleBuilder(myFhirContext)
.addTransactionDeleteEntry(new IdType("Patient/A"))
.andThen()
.getBundleTyped();
output = mySystemDao.transaction(mySrd, input);
IIdType outcomeId = new IdType(output.getEntry().get(0).getResponse().getLocation());
assertEquals("3", outcomeId.getVersionIdPart());
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
// Make sure $meta-get can fetch the tags of the deleted resource
Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
assertEquals("3", meta.getVersionId());
// Revive and verify
Patient patient = new Patient();
patient.setId("A");
patient.getMeta().addProfile("http://profile3");
patient.setActive(true);
myCaptureQueriesListener.clear();
input = new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(patient)
.andThen()
.getBundleTyped();
output = mySystemDao.transaction(mySrd, input);
patient = (Patient) output.getEntry().get(0).getResource();
assert patient != null;
assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
myCaptureQueriesListener.logAllQueries();
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
// Read it back
patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
}
/**
* Make sure tags are preserved
*/
@Test
public void testDeleteResourceWithTags_VersionedTags_InTransaction() {
initializeVersioned();
when(mySrd.getHeader(eq(Constants.HEADER_PREFER))).thenReturn("return=representation");
Bundle input, output;
// Delete
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
input = new BundleBuilder(myFhirContext)
.addTransactionDeleteEntry(new IdType("Patient/A"))
.andThen()
.getBundleTyped();
output = mySystemDao.transaction(mySrd, input);
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
// Make sure $meta-get can fetch the tags of the deleted resource
Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
// Revive and verify
Patient patient = new Patient();
patient.setId("A");
patient.getMeta().addProfile("http://profile3");
patient.setActive(true);
myCaptureQueriesListener.clear();
input = new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(patient)
.andThen()
.getBundleTyped();
output = mySystemDao.transaction(mySrd, input);
patient = (Patient) output.getEntry().get(0).getResource();
assert patient != null;
myCaptureQueriesListener.logAllQueries();
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
// Read it back // Read it back

View File

@ -469,22 +469,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
} }
@Test @Test
public void testReCreateMatchResource() { public void testUpdateAndGetHistoryResource() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo");
IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless();
myCodeSystemDao.delete(id);
codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo");
myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless();
}
@Test
public void testUpdateAndGetHistoryResource() throws InterruptedException {
Patient patient = new Patient(); Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001"); patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester").addGiven("Joe"); patient.addName().setFamily("Tester").addGiven("Joe");

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.interceptor;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner; import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.util.BulkExportUtils; import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
@ -36,9 +37,8 @@ import static org.junit.jupiter.api.Assertions.fail;
public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test { public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);
public static final String TEST_OBV_FILTER = "Observation?status=amended"; public static final String TEST_OBV_FILTER = "Observation?status=amended";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);
@Autowired @Autowired
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor; private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;
@ -55,6 +55,7 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
public void afterEach() { public void afterEach() {
myResponseTerminologyTranslationInterceptor.clearMappingSpecifications(); myResponseTerminologyTranslationInterceptor.clearMappingSpecifications();
myServer.unregisterInterceptor(myResponseTerminologyTranslationInterceptor); myServer.unregisterInterceptor(myResponseTerminologyTranslationInterceptor);
myModelConfig.setNormalizeTerminologyForBulkExportJobs(new ModelConfig().isNormalizeTerminologyForBulkExportJobs());
} }
@Test @Test
@ -139,6 +140,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
@Test @Test
public void testBulkExport_TerminologyTranslation_MappingFound() { public void testBulkExport_TerminologyTranslation_MappingFound() {
myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
// Create some resources to load // Create some resources to load
Observation observation = new Observation(); Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED); observation.setStatus(Observation.ObservationStatus.AMENDED);
@ -157,6 +160,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
@Test @Test
public void testBulkExport_TerminologyTranslation_MappingNotNeeded() { public void testBulkExport_TerminologyTranslation_MappingNotNeeded() {
myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
// Create some resources to load // Create some resources to load
Observation observation = new Observation(); Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED); observation.setStatus(Observation.ObservationStatus.AMENDED);
@ -176,6 +181,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
@Test @Test
public void testBulkExport_TerminologyTranslation_NoMapping() { public void testBulkExport_TerminologyTranslation_NoMapping() {
myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
// Create some resources to load // Create some resources to load
Observation observation = new Observation(); Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED); observation.setStatus(Observation.ObservationStatus.AMENDED);

View File

@ -0,0 +1,686 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.util.BundleBuilder;
import org.hamcrest.Matcher;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsInRelativeOrder;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@SuppressWarnings("Duplicates")
public class ResourceProviderMeaningfulOutcomeMessageR4Test extends BaseResourceProviderR4Test {
@BeforeEach
@Override
public void before() throws Exception {
super.before();
HapiLocalizer.setOurFailOnMissingMessage(true);
myDaoConfig.setAllowMultipleDelete(true);
}
@AfterEach
@Override
public void after() {
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
}
@Test
public void testCreateUpdateDelete() {
// Initial Create-with-client-assigned-ID
Patient p = new Patient();
p.setId("Patient/A");
p.setActive(true);
OperationOutcome oo = (OperationOutcome) myClient
.update()
.resource(p)
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateAsCreate", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_AS_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Update with change
p.setId("Patient/A");
p.setActive(false);
oo = (OperationOutcome) myClient
.update()
.resource(p)
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdate", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Update with no change
p.setId("Patient/A");
oo = (OperationOutcome) myClient
.update()
.resource(p)
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Initial create: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateNoChange", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Delete
oo = (OperationOutcome) myClient
.delete()
.resourceById("Patient", "A")
.execute()
.getOperationOutcome();
ourLog.info("Delete: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Delete with no change
oo = (OperationOutcome) myClient
.delete()
.resourceById("Patient", "A")
.execute()
.getOperationOutcome();
ourLog.info("Delete: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("deleteResourceAlreadyDeleted"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
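The same coding layout these assertions rely on can be consumed outside the test harness; a minimal sketch, assuming a plain IGenericClient named client and an R4 OperationOutcome, of branching on the machine-readable storage code rather than parsing the diagnostics text:
MethodOutcome outcome = client.update()
   .resource(patient)
   .prefer(PreferReturnEnum.OPERATION_OUTCOME)
   .execute();
OperationOutcome oo = (OperationOutcome) outcome.getOperationOutcome();
Coding detail = oo.getIssueFirstRep().getDetails().getCodingFirstRep();
// The code is the StorageResponseCodeEnum name; the system is StorageResponseCodeEnum.SYSTEM.
if (StorageResponseCodeEnum.SYSTEM.equals(detail.getSystem())
   && StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE.name().equals(detail.getCode())) {
   // The server reported a no-op update, so there is nothing new to process downstream.
}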
@Test
public void testCreateUpdateDelete_InTransaction() {
// Initial Create-with-client-assigned-ID
Patient p = new Patient();
p.setId("Patient/A");
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Initial create: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateAsCreate", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_AS_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Update with change
p.setId("Patient/A");
p.setActive(false);
input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.andThen()
.getBundle();
output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdate"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Update with no change
p.setId("Patient/A");
input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.andThen()
.getBundle();
output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateNoChange"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Delete
input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionDeleteEntry("Patient", "A")
.andThen()
.getBundle();
output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
// Delete With No Change
input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionDeleteEntry("Patient", "A")
.andThen()
.getBundle();
output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("deleteResourceAlreadyDeleted"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testCreate_InTransaction() {
Patient p = new Patient();
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionCreateEntry(p)
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulCreate", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalCreate_NoMatch_InTransaction() {
Patient p = new Patient();
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionCreateEntry(p)
.conditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(),
matchesPattern("Successfully conditionally created resource \".*\". No existing resources matched URL \"Patient\\?active=true\". Took [0-9]+ms."));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalCreate_WithMatch_InTransaction() {
createPatient(withActiveTrue());
Patient p = new Patient();
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionCreateEntry(p)
.conditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulCreateConditionalWithMatch"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_NoMatch() {
Patient p = new Patient();
p.setActive(true);
OperationOutcome oo = (OperationOutcome) myClient
.update()
.resource(p)
.conditionalByUrl("Patient?active=true")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoMatch", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_WithMatchAndChange() {
createPatient(withActiveTrue());
Patient p = new Patient();
p.setActive(true);
p.addName().setFamily("Test");
OperationOutcome oo = (OperationOutcome) myClient
.update()
.resource(p)
.conditionalByUrl("Patient?active=true")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalWithMatch", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_WithMatchNoChange() {
createPatient(withActiveTrue());
Patient p = new Patient();
p.setActive(true);
OperationOutcome oo = (OperationOutcome) myClient
.update()
.resource(p)
.conditionalByUrl("Patient?active=true")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoChangeWithMatch", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_NoMatch_InTransaction() {
Patient p = new Patient();
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.conditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoMatch", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_WithMatchAndChange_InTransaction() {
createPatient(withActiveTrue());
Patient p = new Patient();
p.setActive(true);
p.addName().setFamily("Test");
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.conditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalWithMatch"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testConditionalUpdate_WithMatchNoChange_InTransaction() {
createPatient(withActiveTrue());
Patient p = new Patient();
p.setActive(true);
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionUpdateEntry(p)
.conditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoChangeWithMatch"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_WithChanges() {
createPatient(withId("A"), withActiveTrue());
Parameters patch = createPatchToSetPatientActiveFalse();
OperationOutcome oo = (OperationOutcome) myClient
.patch()
.withFhirPatch(patch)
.withId("Patient/A")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatch", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_NoChanges() {
createPatient(withId("A"), withActiveFalse());
Parameters patch = createPatchToSetPatientActiveFalse();
OperationOutcome oo = (OperationOutcome) myClient
.patch()
.withFhirPatch(patch)
.withId("Patient/A")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchNoChange", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_Conditional_MatchWithChanges() {
createPatient(withId("A"), withActiveTrue(), withBirthdate("2022-01-01"));
Parameters patch = createPatchToSetPatientActiveFalse();
OperationOutcome oo = (OperationOutcome) myClient
.patch()
.withFhirPatch(patch)
.conditionalByUrl("Patient?birthdate=2022-01-01")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditional", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_Conditional_MatchNoChanges() {
createPatient(withId("A"), withActiveFalse(), withBirthdate("2022-01-01"));
Parameters patch = createPatchToSetPatientActiveFalse();
OperationOutcome oo = (OperationOutcome) myClient
.patch()
.withFhirPatch(patch)
.conditionalByUrl("Patient?birthdate=2022-01-01")
.prefer(PreferReturnEnum.OPERATION_OUTCOME)
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditionalNoChange", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_WithChanges_InTransaction() {
createPatient(withId("A"), withActiveTrue());
Parameters patch = createPatchToSetPatientActiveFalse();
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionFhirPatchEntry(new IdType("Patient/A"), patch);
Bundle response = myClient
.transaction()
.withBundle((Bundle)bb.getBundle())
.execute();
OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatch"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_NoChanges_InTransaction() {
createPatient(withId("A"), withActiveFalse());
Parameters patch = createPatchToSetPatientActiveFalse();
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionFhirPatchEntry(new IdType("Patient/A"), patch);
Bundle response = myClient
.transaction()
.withBundle((Bundle)bb.getBundle())
.execute();
OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchNoChange"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_Conditional_MatchWithChanges_InTransaction() {
createPatient(withId("A"), withActiveTrue(), withBirthdate("2022-01-01"));
Parameters patch = createPatchToSetPatientActiveFalse();
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionFhirPatchEntry(patch).conditional("Patient?birthdate=2022-01-01");
Bundle response = myClient
.transaction()
.withBundle((Bundle)bb.getBundle())
.execute();
OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditional"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testPatch_Conditional_MatchNoChanges_InTransaction() {
createPatient(withId("A"), withActiveFalse(), withBirthdate("2022-01-01"));
Parameters patch = createPatchToSetPatientActiveFalse();
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionFhirPatchEntry(patch).conditional("Patient?birthdate=2022-01-01");
Bundle response = myClient
.transaction()
.withBundle((Bundle)bb.getBundle())
.execute();
OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditionalNoChange"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testMultiDelete_NoneFound() {
OperationOutcome oo = (OperationOutcome) myClient
.delete()
.resourceConditionalByUrl("Patient?active=true")
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("unableToDeleteNotFound"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testMultiDelete_SomeFound() {
createPatient(withActiveTrue());
createPatient(withActiveTrue());
createPatient(withActiveTrue());
OperationOutcome oo = (OperationOutcome) myClient
.delete()
.resourceConditionalByUrl("Patient?active=true")
.execute()
.getOperationOutcome();
ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
@Test
public void testMultiDelete_SomeFound_InTransaction() {
createPatient(withActiveTrue());
createPatient(withActiveTrue());
createPatient(withActiveTrue());
Bundle input = (Bundle) new BundleBuilder(myFhirContext)
.addTransactionDeleteEntryConditional("Patient?active=true")
.andThen()
.getBundle();
Bundle output = myClient
.transaction()
.withBundle(input)
.execute();
ourLog.info("Delete {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
}
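/**
 * Builds a FHIRPatch Parameters resource that replaces Patient.active with false.
 */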
private static Parameters createPatchToSetPatientActiveFalse() {
Parameters patch = new Parameters();
Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
op.addPart().setName("type").setValue(new CodeType("replace"));
op.addPart().setName("path").setValue(new CodeType("Patient.active"));
op.addPart().setName("value").setValue(new BooleanType(false));
return patch;
}
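/**
 * Builds a Hamcrest matcher from the given HAPI message key(s), resolved against
 * BaseStorageDao's localized templates: regex metacharacters in each template are
 * escaped, {0}-style placeholders become wildcards, and multiple templates are
 * joined with a space, so diagnostics can be asserted without duplicating the text.
 */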
private Matcher<String> matchesHapiMessage(String... theMessageKey) {
StringBuilder joinedPattern = new StringBuilder();
for (var next : theMessageKey) {
String qualifiedKey = BaseStorageDao.class.getName() + "." + next;
String pattern = myFhirContext.getLocalizer().getFormatString(qualifiedKey);
assertTrue(isNotBlank(pattern));
pattern = pattern
.replace("\"", "\\\"")
.replace("(", "\\(")
.replace(")", "\\)")
.replace("[", "\\[")
.replace("]", "\\]")
.replace(".", "\\.")
.replaceAll("\\{[0-9]+}", ".*");
if (joinedPattern.length() > 0) {
joinedPattern.append(' ');
}
joinedPattern.append(pattern);
}
return matchesPattern(joinedPattern.toString());
}
}
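The tests above rely on the server encoding its outcome as a coding whose system is StorageResponseCodeEnum.SYSTEM and whose code is the enum constant name. As a minimal, illustrative sketch only (not taken from this commit; the class and method names below are hypothetical), a consumer could turn that coding back into the enum like this:

import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.OperationOutcome;

// Hypothetical helper, shown only to illustrate how the coding asserted in the
// tests above could be read back by a consumer.
public class StorageResponseCodeExtractor {

	/**
	 * Returns the StorageResponseCodeEnum carried by the first issue of the given
	 * OperationOutcome, or null when the outcome does not use the
	 * StorageResponseCodeEnum.SYSTEM coding (for example, a non-HAPI server).
	 */
	public static StorageResponseCodeEnum extract(OperationOutcome theOutcome) {
		Coding coding = theOutcome.getIssueFirstRep().getDetails().getCodingFirstRep();
		if (!StorageResponseCodeEnum.SYSTEM.equals(coding.getSystem())) {
			return null;
		}
		// Per the assertions above, the code element holds the enum constant name,
		// e.g. SUCCESSFUL_PATCH or SUCCESSFUL_DELETE_NOT_FOUND.
		return StorageResponseCodeEnum.valueOf(coding.getCode());
	}
}

Returning null rather than throwing keeps such a helper safe against OperationOutcomes produced by servers that do not populate this coding.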

View File

@@ -1,7 +1,9 @@
package ca.uhn.fhir.jpa.provider.r4;
+ import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
+ import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
@@ -14,6 +16,7 @@ import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
+ import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -46,6 +49,7 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
+ import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
@@ -69,6 +73,7 @@ import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
+ import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IAnyResource;
@@ -165,7 +170,6 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
- import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
@@ -263,6 +267,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
public void before() throws Exception {
super.before();
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
+ HapiLocalizer.setOurFailOnMissingMessage(true);
myDaoConfig.setAllowMultipleDelete(true);
myClient.registerInterceptor(myCapturingInterceptor);
@@ -292,7 +297,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
- public void createResourceSearchParameter_withExpressionMetaSecurity_succeeds(){
+ public void createResourceSearchParameter_withExpressionMetaSecurity_succeeds() {
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId("resource-security");
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
@@ -310,7 +315,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
- public void createSearchParameter_with2Expressions_succeeds(){
+ public void createSearchParameter_with2Expressions_succeeds() {
SearchParameter searchParameter = new SearchParameter();
@@ -320,7 +325,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
searchParameter.setType(Enumerations.SearchParamType.TOKEN);
searchParameter.setExpression("Patient.gender|Person.gender");
- MethodOutcome result= myClient.create().resource(searchParameter).execute();
+ MethodOutcome result = myClient.create().resource(searchParameter).execute();
assertEquals(true, result.getCreated());
@ -757,7 +762,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
@Test @Test
public void testCreateWithNoBody() throws IOException { public void testCreateWithNoBody() throws IOException {
@ -817,7 +821,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
@BeforeEach @BeforeEach
public void beforeDisableResultReuse() { public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null); myDaoConfig.setReuseCachedSearchResultsForMillis(null);
@ -831,7 +834,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(200, resp.getStatusLine().getStatusCode()); assertEquals(200, resp.getStatusLine().getStatusCode());
} }
private ArrayList<IBaseResource> genResourcesOfType(Bundle theRes, Class<? extends IBaseResource> theClass) { private ArrayList<IBaseResource> genResourcesOfType(Bundle theRes, Class<? extends IBaseResource> theClass) {
ArrayList<IBaseResource> retVal = new ArrayList<>(); ArrayList<IBaseResource> retVal = new ArrayList<>();
for (BundleEntryComponent next : theRes.getEntry()) { for (BundleEntryComponent next : theRes.getEntry()) {
@ -974,7 +976,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
@Test @Test
public void testCreateAndReadBackResourceWithContainedReferenceToContainer() { public void testCreateAndReadBackResourceWithContainedReferenceToContainer() {
myFhirContext.setParserErrorHandler(new StrictErrorHandler()); myFhirContext.setParserErrorHandler(new StrictErrorHandler());
@ -1039,7 +1040,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals("#", loc.getManagingOrganization().getReference()); assertEquals("#", loc.getManagingOrganization().getReference());
} }
@Test @Test
public void testCountParam() { public void testCountParam() {
List<IBaseResource> resources = new ArrayList<>(); List<IBaseResource> resources = new ArrayList<>();
@ -1099,7 +1099,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertNull(p.getBirthDate()); assertNull(p.getBirthDate());
} }
/** /**
* See #438 * See #438
*/ */
@@ -1648,7 +1647,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String encoded = myFhirContext.newXmlParser().encodeResourceToString(response.getOperationOutcome());
ourLog.info(encoded);
assertThat(encoded, containsString(
- "<issue><severity value=\"information\"/><code value=\"informational\"/><diagnostics value=\"Successfully deleted 2 resource(s) in "));
+ "Successfully deleted 2 resource(s). Took "));
try {
myClient.read().resource("Patient").withId(id1).execute();
fail();
@@ -1674,7 +1673,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
ourLog.info(response);
assertEquals(200, resp.getStatusLine().getStatusCode());
assertThat(response, containsString(
- "<issue><severity value=\"warning\"/><code value=\"not-found\"/><diagnostics value=\"Unable to find resource matching URL &quot;Patient?identifier=testDeleteConditionalNoMatches&quot;. Deletion failed.\"/></issue>"));
+ "<diagnostics value=\"Unable to find resource matching URL &quot;Patient?identifier=testDeleteConditionalNoMatches&quot;. Nothing has been deleted.\"/>"
+ ));
}
}
@ -1711,7 +1711,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
@Test @Test
@Disabled @Disabled
public void testQuery() throws IOException { public void testQuery() throws IOException {
@@ -1752,7 +1751,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took"));
} finally {
response.close();
}
@@ -1779,7 +1778,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Deletion failed."));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL"));
} finally {
response.close();
}
@@ -1852,17 +1851,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
- @Test
- public void testDeleteReturnsOperationOutcome() {
- Patient p = new Patient();
- p.addName().setFamily("FAM");
- IIdType id = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
- MethodOutcome resp = myClient.delete().resourceById(id).execute();
- OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
- }
@Test
public void testDeleteNonExistingResourceReturnsOperationOutcome() {
String resourceType = "Patient";
@@ -1881,7 +1869,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
MethodOutcome resp = myClient.delete().resourceById(id).execute();
OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), containsString("Successfully deleted 1 resource(s)."));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), containsString("Took "));
resp = myClient.delete().resourceById(id).execute();
oo = (OperationOutcome) resp.getOperationOutcome();
@ -2349,7 +2338,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertThat(ids, not(containsInRelativeOrder(c3Id))); assertThat(ids, not(containsInRelativeOrder(c3Id)));
} }
@Test @Test
public void testEverythingPatientTypeWithIdParameter() { public void testEverythingPatientTypeWithIdParameter() {
String methodName = "testEverythingPatientTypeWithIdParameter"; String methodName = "testEverythingPatientTypeWithIdParameter";
@ -2967,7 +2955,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
@Test @Test
public void testValidateResourceContainingProfileDeclarationDoesntResolve() throws IOException { public void testValidateResourceContainingProfileDeclarationDoesntResolve() throws IOException {
Observation input = new Observation(); Observation input = new Observation();
@ -2988,7 +2975,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
@SuppressWarnings("unused") @SuppressWarnings("unused")
@Test @Test
public void testFullTextSearch() throws Exception { public void testFullTextSearch() throws Exception {
@ -3397,31 +3383,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
// private void delete(String theResourceType, String theParamName, String theParamValue) {
// Bundle resources;
// do {
// IQuery<Bundle> forResource = ourClient.search().forResource(theResourceType);
// if (theParamName != null) {
// forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
// }
// resources = forResource.execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// ourClient.delete().resource(next).execute();
// }
// } while (resources.size() > 0);
// }
//
// private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
// {
// Bundle resources = ourClient.search().forResource(theResourceType).where(new
// TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// ourClient.delete().resource(next).execute();
// }
// }
@Test @Test
public void testIdAndVersionInBodyForCreate() throws IOException { public void testIdAndVersionInBodyForCreate() throws IOException {
String methodName = "testIdAndVersionInBodyForCreate"; String methodName = "testIdAndVersionInBodyForCreate";
@ -3464,6 +3425,31 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
// private void delete(String theResourceType, String theParamName, String theParamValue) {
// Bundle resources;
// do {
// IQuery<Bundle> forResource = ourClient.search().forResource(theResourceType);
// if (theParamName != null) {
// forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
// }
// resources = forResource.execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// ourClient.delete().resource(next).execute();
// }
// } while (resources.size() > 0);
// }
//
// private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
// {
// Bundle resources = ourClient.search().forResource(theResourceType).where(new
// TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// ourClient.delete().resource(next).execute();
// }
// }
@Test @Test
public void testIdAndVersionInBodyForUpdate() throws IOException { public void testIdAndVersionInBodyForUpdate() throws IOException {
String methodName = "testIdAndVersionInBodyForUpdate"; String methodName = "testIdAndVersionInBodyForUpdate";
@ -4190,7 +4176,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals("hugs", enc.getReasonCodeFirstRep().getCodingFirstRep().getCode()); assertEquals("hugs", enc.getReasonCodeFirstRep().getCodingFirstRep().getCode());
} }
@Test @Test
public void testTerminologyWithCompleteCs_SearchForConceptIn() throws Exception { public void testTerminologyWithCompleteCs_SearchForConceptIn() throws Exception {
@ -5093,7 +5078,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(2, ids.size()); assertEquals(2, ids.size());
} }
@Test @Test
public void testSearchWithNormalizedQuantitySearchSupported_DegreeFahrenheit() throws Exception { public void testSearchWithNormalizedQuantitySearchSupported_DegreeFahrenheit() throws Exception {
@ -5244,7 +5228,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
@Test @Test
public void testSearchReusesResultsDisabled() { public void testSearchReusesResultsDisabled() {
List<IBaseResource> resources = new ArrayList<>(); List<IBaseResource> resources = new ArrayList<>();
@ -5863,7 +5846,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(oid2, list.get(3)); assertEquals(oid2, list.get(3));
} }
@Test @Test
public void testSearchWithMissing() { public void testSearchWithMissing() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
@ -7475,7 +7457,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
// ensure the patient has the expected overall history // ensure the patient has the expected overall history
Bundle result = myClient.history() Bundle result = myClient.history()
.onInstance("Patient/"+patientId) .onInstance("Patient/" + patientId)
.returnBundle(Bundle.class) .returnBundle(Bundle.class)
.execute(); .execute();
@ -7508,8 +7490,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.before(dateV2)); assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size()); assertEquals(2, resultIds.size());
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
} }
private void verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { private void verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@ -7518,17 +7500,17 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.after(dateV2)); assertTrue(timeBetweenUpdates.after(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(1, resultIds.size()); assertEquals(1, resultIds.size());
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
} }
private void verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { private void verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, - delayInMs); Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs);
assertTrue(timeBetweenUpdates.before(dateV1)); assertTrue(timeBetweenUpdates.before(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2)); assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size()); assertEquals(2, resultIds.size());
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
} }
private void verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { private void verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@ -7537,7 +7519,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.before(dateV2)); assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates)); List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates));
assertEquals(1, resultIds.size()); assertEquals(1, resultIds.size());
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
} }
private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@ -7549,13 +7531,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
private void verifySinceBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { private void verifySinceBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, - delayInMs); Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs);
assertTrue(timeBetweenUpdates.before(dateV1)); assertTrue(timeBetweenUpdates.before(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2)); assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates)); List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size()); assertEquals(2, resultIds.size());
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2")); assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
} }
@Test @Test
@ -7686,7 +7668,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
return new InstantDt(theDate).getValueAsString(); return new InstantDt(theDate).getValueAsString();
} }
public IIdType createPatientWithIndexAtOrganization(String theMethodName, String theIndex, IIdType theOrganizationId) { public IIdType createPatientWithIndexAtOrganization(String theMethodName, String theIndex, IIdType theOrganizationId) {
Patient p1 = new Patient(); Patient p1 = new Patient();
p1.addName().setFamily(theMethodName + theIndex); p1.addName().setFamily(theMethodName + theIndex);
@ -7728,39 +7709,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@Nested @Nested
public class MissingSearchParameterTests { public class MissingSearchParameterTests {
private interface XtoY<X, Y> {
Y doTask(X theInput);
}
private static class MissingSearchTestParameters {
/**
* The setting for IndexMissingFields
*/
public final DaoConfig.IndexEnabledEnum myEnableMissingFieldsValue;
/**
* Whether to use :missing=true/false
*/
public final boolean myIsMissing;
/**
* Whether or not the field is populated or not.
* True -> populate field.
* False -> not populated
*/
public final boolean myIsValuePresentOnResource;
public MissingSearchTestParameters(
DaoConfig.IndexEnabledEnum theEnableMissingFields,
boolean theIsMissing,
boolean theHasField
) {
myEnableMissingFieldsValue = theEnableMissingFields;
myIsMissing = theIsMissing;
myIsValuePresentOnResource = theHasField;
}
}
private IParser myParser; private IParser myParser;
@BeforeEach @BeforeEach
@ -7827,30 +7775,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
//@formatter:on //@formatter:on
} }
/**
* The method that generates parameters for tests
*/
private static Stream<Arguments> provideParameters() {
return Stream.of(
// 1
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, true)),
// 2
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, false)),
// 3
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, true)),
// 4
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, false)),
// 5
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, true)),
// 6
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, true)),
// 7
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, false)),
// 8
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, false))
);
}
/** /**
* Runs the actual test for whichever search parameter and given inputs we want. * Runs the actual test for whichever search parameter and given inputs we want.
*/ */
@ -8036,6 +7960,63 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
); );
}); });
} }
private interface XtoY<X, Y> {
Y doTask(X theInput);
}
private static class MissingSearchTestParameters {
/**
* The setting for IndexMissingFields
*/
public final DaoConfig.IndexEnabledEnum myEnableMissingFieldsValue;
/**
* Whether to use :missing=true/false
*/
public final boolean myIsMissing;
/**
* Whether or not the field is populated or not.
* True -> populate field.
* False -> not populated
*/
public final boolean myIsValuePresentOnResource;
public MissingSearchTestParameters(
DaoConfig.IndexEnabledEnum theEnableMissingFields,
boolean theIsMissing,
boolean theHasField
) {
myEnableMissingFieldsValue = theEnableMissingFields;
myIsMissing = theIsMissing;
myIsValuePresentOnResource = theHasField;
}
}
/**
* The method that generates parameters for tests
*/
private static Stream<Arguments> provideParameters() {
return Stream.of(
// 1
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, true)),
// 2
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, false)),
// 3
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, true)),
// 4
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, false)),
// 5
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, true)),
// 6
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, true)),
// 7
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, false)),
// 8
Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, false))
);
}
} }
} }

View File

@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl; import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap; import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap; import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.JpaResourceDao; import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
@ -142,6 +143,8 @@ public class GiantTransactionPerfTest {
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer; private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
@Mock @Mock
private IIdHelperService myIdHelperService; private IIdHelperService myIdHelperService;
@Mock
private IJpaStorageResourceParser myJpaStorageResourceParser;
@AfterEach @AfterEach
public void afterEach() { public void afterEach() {
@@ -192,7 +195,6 @@ public class GiantTransactionPerfTest {
mySystemDao = new FhirSystemDaoR4();
mySystemDao.setTransactionProcessorForUnitTest(myTransactionProcessor);
mySystemDao.setDaoConfigForUnitTest(myDaoConfig);
- mySystemDao.setPartitionSettingsForUnitTest(myPartitionSettings);
mySystemDao.start();
when(myAppCtx.getBean(eq(IInstanceValidatorModule.class))).thenReturn(myInstanceValidatorSvc);
@ -265,6 +267,7 @@ public class GiantTransactionPerfTest {
myEobDao.setDaoConfigForUnitTest(myDaoConfig); myEobDao.setDaoConfigForUnitTest(myDaoConfig);
myEobDao.setIdHelperSvcForUnitTest(myIdHelperService); myEobDao.setIdHelperSvcForUnitTest(myIdHelperService);
myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings); myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings);
myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
myEobDao.start(); myEobDao.start();
myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao)); myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -85,6 +86,8 @@ class ITermReadSvcTest {
private DaoRegistry myDaoRegistry; private DaoRegistry myDaoRegistry;
@Mock @Mock
private IFhirResourceDao<CodeSystem> myFhirResourceDao; private IFhirResourceDao<CodeSystem> myFhirResourceDao;
@Mock
private IJpaStorageResourceParser myJpaStorageResourceParser;
@Nested @Nested
@ -93,6 +96,7 @@ class ITermReadSvcTest {
@BeforeEach @BeforeEach
public void setup() { public void setup() {
ReflectionTestUtils.setField(testedClass, "myTermValueSetDao", myTermValueSetDao); ReflectionTestUtils.setField(testedClass, "myTermValueSetDao", myTermValueSetDao);
ReflectionTestUtils.setField(testedClass, "myJpaStorageResourceParser", myJpaStorageResourceParser);
} }
@Test @Test
@ -214,6 +218,7 @@ class ITermReadSvcTest {
@BeforeEach @BeforeEach
public void setup() { public void setup() {
ReflectionTestUtils.setField(testedClass, "myEntityManager", myEntityManager); ReflectionTestUtils.setField(testedClass, "myEntityManager", myEntityManager);
ReflectionTestUtils.setField(testedClass, "myJpaStorageResourceParser", myJpaStorageResourceParser);
} }
@@ -245,13 +250,13 @@ class ITermReadSvcTest {
when(myEntityManager.createQuery(anyString()).getResultList())
.thenReturn(Lists.newArrayList(resource1));
when(myDaoRegistry.getResourceDao("CodeSystem")).thenReturn(myFhirResourceDao);
- when(myFhirResourceDao.toResource(resource1, false)).thenReturn(myCodeSystemResource);
+ when(myJpaStorageResourceParser.toResource(resource1, false)).thenReturn(myCodeSystemResource);
testedClass.readCodeSystemByForcedId("a-cs-id");
- verify(myFhirResourceDao, times(1)).toResource(any(), eq(false));
+ verify(myJpaStorageResourceParser, times(1)).toResource(any(), eq(false));
}
}

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@@ -535,12 +535,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
myPartitionSettings.setPartitioningEnabled(false);
}
- @Order(Integer.MIN_VALUE)
- @BeforeEach
- public void beforeResetInterceptors() {
- // nothing
- }
@Override
@Order(Integer.MAX_VALUE)
@AfterEach

View File

@@ -146,9 +146,9 @@ public class PatientReindexTestHelper {
patient.getNameFirstRep().setFamily("Family-"+i).addGiven("Given-"+i);
patient.getIdentifierFirstRep().setValue("Id-"+i);
myPatientDao.create(patient, requestDetails);
- }
TestUtil.sleepOneClick();
}
+ }
private void validatePersistedPatients(int theExpectedNumPatients, long theExpectedVersion) { private void validatePersistedPatients(int theExpectedNumPatients, long theExpectedVersion) {
RequestDetails requestDetails = new SystemRequestDetails(); RequestDetails requestDetails = new SystemRequestDetails();

View File

@ -2,6 +2,8 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@ -192,6 +194,8 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
private int askAtOrderEntryCount = 0; private int askAtOrderEntryCount = 0;
private int validatedPropertiesCounter = 0; private int validatedPropertiesCounter = 0;
private int validatedMapToEntriesCounter = 0; private int validatedMapToEntriesCounter = 0;
@Autowired
private IJpaStorageResourceParser myJpaStorageResourceParser;
@BeforeEach @BeforeEach
void setUp() { void setUp() {
@@ -606,7 +610,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
List<ResourceTable> vsList = (List<ResourceTable>) q1.getResultList();
assertEquals(1, vsList.size());
long vsLongId = vsList.get(0).getId();
- ValueSet vs = (ValueSet) myValueSetDao.toResource(vsList.get(0), false);
+ ValueSet vs = (ValueSet) myJpaStorageResourceParser.toResource(vsList.get(0), false);
assertNotNull(vs);
Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId);

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -7,7 +7,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -63,6 +63,7 @@ public class TransactionDetails {
private Map<String, Object> myUserData; private Map<String, Object> myUserData;
private ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts; private ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts;
private EnumSet<Pointcut> myDeferredInterceptorBroadcastPointcuts; private EnumSet<Pointcut> myDeferredInterceptorBroadcastPointcuts;
private boolean myFhirTransaction;
/** /**
* Constructor * Constructor
@ -306,5 +307,13 @@ public class TransactionDetails {
public boolean hasResolvedResourceIds() { public boolean hasResolvedResourceIds() {
return !myResolvedResourceIds.isEmpty(); return !myResolvedResourceIds.isEmpty();
} }
public void setFhirTransaction(boolean theFhirTransaction) {
myFhirTransaction = theFhirTransaction;
}
public boolean isFhirTransaction() {
return myFhirTransaction;
}
} }

View File

@ -37,6 +37,7 @@ import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
@ -515,6 +516,9 @@ public class AuthorizationInterceptor implements IRuleApplier {
retVal = retVal.subList(1, retVal.size()); retVal = retVal.subList(1, retVal.size());
} }
// Don't apply security to OperationOutcome
retVal.removeIf(t->t instanceof IBaseOperationOutcome);
return retVal; return retVal;
} }

View File

@ -7,7 +7,7 @@
<parent> <parent>
<artifactId>hapi-fhir-serviceloaders</artifactId> <artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -7,7 +7,7 @@
<parent> <parent>
<artifactId>hapi-fhir-serviceloaders</artifactId> <artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
@ -20,7 +20,7 @@
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId> <artifactId>hapi-fhir-caching-api</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.github.ben-manes.caffeine</groupId> <groupId>com.github.ben-manes.caffeine</groupId>

View File

@ -7,7 +7,7 @@
<parent> <parent>
<artifactId>hapi-fhir-serviceloaders</artifactId> <artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -7,7 +7,7 @@
<parent> <parent>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath> <relativePath>../../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId> <artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId> <artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId> <artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId> <artifactId>hapi-fhir-spring-boot</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -33,6 +33,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
@ -41,8 +42,10 @@ import com.google.common.collect.ListMultimap;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -60,8 +63,13 @@ public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParamete
@Autowired @Autowired
private IBulkExportProcessor myBulkExportProcessor; private IBulkExportProcessor myBulkExportProcessor;
@Autowired(required = false) @Autowired
private ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc; private ApplicationContext myApplicationContext;
@Autowired
private ModelConfig myModelConfig;
private volatile ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc;
@Nonnull @Nonnull
@Override @Override
@ -82,8 +90,14 @@ public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParamete
myBulkExportProcessor.expandMdmResources(allResources); myBulkExportProcessor.expandMdmResources(allResources);
} }
if (myResponseTerminologyTranslationSvc != null) { // Normalize terminology
myResponseTerminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources); if (myModelConfig.isNormalizeTerminologyForBulkExportJobs()) {
ResponseTerminologyTranslationSvc terminologyTranslationSvc = myResponseTerminologyTranslationSvc;
if (terminologyTranslationSvc == null) {
terminologyTranslationSvc = myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class);
myResponseTerminologyTranslationSvc = terminologyTranslationSvc;
}
terminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources);
} }
// encode them // encode them
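
The hunk above replaces the optional @Autowired(required = false) terminology service with a lazy lookup: the service is only resolved from the Spring ApplicationContext the first time the new ModelConfig flag is enabled, then cached in a volatile field for later work chunks. The following is a minimal, self-contained sketch of that pattern, not the actual ExpandResourcesStep source; the nested ResponseTerminologyTranslationSvc and ModelConfig classes are illustrative stand-ins for the real HAPI types, and only the lookup/caching logic mirrors the diff.

// Sketch only: stand-in types, real pattern. Assumes Spring Framework 5+ on the classpath.
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

import java.util.List;

public class LazyTerminologyLookupSketch {

	// Stand-in for ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc
	public static class ResponseTerminologyTranslationSvc {
		public void processResourcesForTerminologyTranslation(List<Object> theResources) {
			System.out.println("Normalizing terminology for " + theResources.size() + " resources");
		}
	}

	// Stand-in for ca.uhn.fhir.jpa.model.entity.ModelConfig
	public static class ModelConfig {
		private boolean myNormalizeTerminologyForBulkExportJobs;
		public boolean isNormalizeTerminologyForBulkExportJobs() { return myNormalizeTerminologyForBulkExportJobs; }
		public void setNormalizeTerminologyForBulkExportJobs(boolean theValue) { myNormalizeTerminologyForBulkExportJobs = theValue; }
	}

	private final ApplicationContext myApplicationContext;
	private final ModelConfig myModelConfig;
	// volatile so a lookup done by one worker thread is visible to the others
	private volatile ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc;

	public LazyTerminologyLookupSketch(ApplicationContext theApplicationContext, ModelConfig theModelConfig) {
		myApplicationContext = theApplicationContext;
		myModelConfig = theModelConfig;
	}

	public void expand(List<Object> theResources) {
		// Normalize terminology only when the storage setting enables it
		if (myModelConfig.isNormalizeTerminologyForBulkExportJobs()) {
			ResponseTerminologyTranslationSvc svc = myResponseTerminologyTranslationSvc;
			if (svc == null) {
				// First use: resolve the bean lazily and cache it
				svc = myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class);
				myResponseTerminologyTranslationSvc = svc;
			}
			svc.processResourcesForTerminologyTranslation(theResources);
		}
		// ... encode and write the resources as before ...
	}

	public static void main(String[] args) {
		AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
		ctx.registerBean(ResponseTerminologyTranslationSvc.class);
		ctx.refresh();

		ModelConfig config = new ModelConfig();
		config.setNormalizeTerminologyForBulkExportJobs(true);

		new LazyTerminologyLookupSketch(ctx, config).expand(List.of(new Object(), new Object()));
		ctx.close();
	}
}

One consequence of this design, assuming it matches the intent of the change: when the flag is off, the terminology bean is never requested at all, so bulk export no longer depends on that bean being present in the context.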

View File

@ -13,6 +13,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
@ -54,6 +55,9 @@ public class ExpandResourcesStepTest {
@Spy @Spy
private FhirContext myFhirContext = FhirContext.forR4Cached(); private FhirContext myFhirContext = FhirContext.forR4Cached();
@Spy
private ModelConfig myModelConfig = new ModelConfig();
@InjectMocks @InjectMocks
private ExpandResourcesStep mySecondStep; private ExpandResourcesStep mySecondStep;
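
Because the step now reads ModelConfig, the test hunk above adds a @Spy ModelConfig so @InjectMocks can hand it to ExpandResourcesStep. Below is a minimal, self-contained sketch of that spy wiring and of how an individual test could override the new setting; the nested classes are stand-ins for the real HAPI types (the real ModelConfig and step are not reproduced here), and only the @Spy/@InjectMocks/doReturn pattern is the point.

// Sketch only: stand-in types, real Mockito wiring. Assumes JUnit 5 + Mockito on the classpath.
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doReturn;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;

@ExtendWith(MockitoExtension.class)
class ModelConfigSpyWiringSketchTest {

	// Stand-in for ca.uhn.fhir.jpa.model.entity.ModelConfig
	static class StubModelConfig {
		private boolean myNormalizeTerminologyForBulkExportJobs;
		public boolean isNormalizeTerminologyForBulkExportJobs() { return myNormalizeTerminologyForBulkExportJobs; }
	}

	// Stand-in for the step under test, which consults the config flag
	static class StubExpandStep {
		private StubModelConfig myModelConfig;
		public boolean shouldNormalizeTerminology() {
			return myModelConfig.isNormalizeTerminologyForBulkExportJobs();
		}
	}

	// A real instance wrapped as a spy: unstubbed getters keep their real defaults
	@Spy
	private StubModelConfig myModelConfig = new StubModelConfig();

	// Mockito injects the spy into this field-by-type, mirroring the test hunk above
	@InjectMocks
	private StubExpandStep myStep;

	@Test
	void settingCanBeOverriddenPerTest() {
		// doReturn/when is the spy-safe way to override a single getter
		doReturn(true).when(myModelConfig).isNormalizeTerminologyForBulkExportJobs();

		assertTrue(myStep.shouldNormalizeTerminology());
	}
}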

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.3.1-SNAPSHOT</version> <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

Some files were not shown because too many files have changed in this diff.