theCodeSystems, String theClasspath) {
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java
new file mode 100644
index 00000000000..5ee7161d130
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java
@@ -0,0 +1,29 @@
+package ca.uhn.fhir.model.api;
+
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface ICodingEnum {
+
+ String getCode();
+ String getSystem();
+ String getDisplay();
+
+}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java
new file mode 100644
index 00000000000..27fde42c17e
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java
@@ -0,0 +1,72 @@
+package ca.uhn.fhir.model.api;
+
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+/**
+ * This enum contains the allowable codes in the HAPI FHIR defined
+ * codesystem: https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code
+ *
+ * This is used in CRUD response OperationOutcome resources.
+ */
+public enum StorageResponseCodeEnum implements ICodingEnum {
+
+ SUCCESSFUL_CREATE("Create succeeded."),
+ SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH("Conditional create succeeded: no existing resource matched the conditional URL."),
+ SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH("Conditional create succeeded: an existing resource matched the conditional URL so no action was taken."),
+ SUCCESSFUL_UPDATE("Update succeeded."),
+ SUCCESSFUL_UPDATE_AS_CREATE("Update as create succeeded."),
+ SUCCESSFUL_UPDATE_NO_CHANGE("Update succeeded: No changes were detected so no action was taken."),
+ SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH("Conditional update succeeded: no existing resource matched the conditional URL so a new resource was created."),
+ SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH("Conditional update succeeded: an existing resource matched the conditional URL and was updated."),
+ SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE("Conditional update succeeded: an existing resource matched the conditional URL and was updated, but no changes were detected so no action was taken."),
+ SUCCESSFUL_DELETE("Delete succeeded."),
+ SUCCESSFUL_DELETE_ALREADY_DELETED("Delete succeeded: Resource was already deleted so no action was taken."),
+ SUCCESSFUL_DELETE_NOT_FOUND("Delete succeeded: No existing resource was found so no action was taken."),
+
+ SUCCESSFUL_PATCH("Patch succeeded."),
+
+ SUCCESSFUL_PATCH_NO_CHANGE("Patch succeeded: No changes were detected so no action was taken."),
+ SUCCESSFUL_CONDITIONAL_PATCH("Conditional patch succeeded."),
+ SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE("Conditional patch succeeded: No changes were detected so no action was taken.");
+
+ public static final String SYSTEM = "https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code";
+
+ private final String myDisplay;
+
+ StorageResponseCodeEnum(String theDisplay) {
+ myDisplay = theDisplay;
+ }
+
+ @Override
+ public String getCode() {
+ return name();
+ }
+
+ @Override
+ public String getSystem() {
+ return SYSTEM;
+ }
+
+ @Override
+ public String getDisplay() {
+ return myDisplay;
+ }
+}
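The sketch below is an editorial illustration, not part of the patch: it shows how an ICodingEnum constant such as StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE can be copied onto a version-independent IBaseCoding using only the accessors defined by the new interface. The CodingEnumUtil helper and its populate() method are hypothetical names.

```java
// Hypothetical helper (not part of this patch): copies any ICodingEnum constant
// onto an IBaseCoding via the three accessors declared by the new interface.
import ca.uhn.fhir.model.api.ICodingEnum;
import org.hl7.fhir.instance.model.api.IBaseCoding;

public final class CodingEnumUtil {

	private CodingEnumUtil() {
		// static utility class
	}

	public static IBaseCoding populate(ICodingEnum theSource, IBaseCoding theTarget) {
		theTarget.setSystem(theSource.getSystem());
		theTarget.setCode(theSource.getCode());
		theTarget.setDisplay(theSource.getDisplay());
		return theTarget;
	}
}
```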
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
index 9f531df5b35..7b3b3f52734 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
@@ -29,9 +29,12 @@ import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
import org.hl7.fhir.instance.model.api.IBaseBundle;
+import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import javax.annotation.Nonnull;
import java.util.Objects;
/**
@@ -40,7 +43,7 @@ import java.util.Objects;
* (method and search).
*
*
- *
+ *
* This is not yet complete, and doesn't support all FHIR features. USE WITH CAUTION as the API
* may change.
*
@@ -101,10 +104,8 @@ public class BundleBuilder {
/**
* Sets the specified primitive field on the bundle with the value provided.
*
- * @param theFieldName
- * Name of the primitive field.
- * @param theFieldValue
- * Value of the field to be set.
+ * @param theFieldName Name of the primitive field.
+ * @param theFieldValue Value of the field to be set.
*/
public BundleBuilder setBundleField(String theFieldName, String theFieldValue) {
BaseRuntimeChildDefinition typeChild = myBundleDef.getChildByName(theFieldName);
@@ -119,12 +120,9 @@ public class BundleBuilder {
/**
* Sets the specified primitive field on the search entry with the value provided.
*
- * @param theSearch
- * Search part of the entry
- * @param theFieldName
- * Name of the primitive field.
- * @param theFieldValue
- * Value of the field to be set.
+ * @param theSearch Search part of the entry
+ * @param theFieldName Name of the primitive field.
+ * @param theFieldValue Value of the field to be set.
*/
public BundleBuilder setSearchField(IBase theSearch, String theFieldName, String theFieldValue) {
BaseRuntimeChildDefinition typeChild = mySearchDef.getChildByName(theFieldName);
@@ -144,6 +142,37 @@ public class BundleBuilder {
return this;
}
+ /**
+ * Adds a FHIRPatch patch bundle to the transaction
+ * @param theTarget The target resource ID to patch
+ * @param thePatch The FHIRPath Parameters resource
+ * @since 6.3.0
+ */
+ public PatchBuilder addTransactionFhirPatchEntry(IIdType theTarget, IBaseParameters thePatch) {
+ Validate.notNull(theTarget, "theTarget must not be null");
+ Validate.notBlank(theTarget.getResourceType(), "theTarget must contain a resource type");
+ Validate.notBlank(theTarget.getIdPart(), "theTarget must contain an ID");
+
+ IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(thePatch, theTarget.getValue(), theTarget.toUnqualifiedVersionless().getValue(), "PATCH");
+
+ return new PatchBuilder(url);
+ }
+
+ /**
+ * Adds a FHIRPatch patch bundle to the transaction. This method is intended for conditional PATCH operations. If you
+ * know the ID of the resource you wish to patch, use {@link #addTransactionFhirPatchEntry(IIdType, IBaseParameters)}
+ * instead.
+ *
+ * @param thePatch The FHIRPath Parameters resource
+ * @since 6.3.0
+ * @see #addTransactionFhirPatchEntry(IIdType, IBaseParameters)
+ */
+ public PatchBuilder addTransactionFhirPatchEntry(IBaseParameters thePatch) {
+ IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(thePatch, null, null, "PATCH");
+
+ return new PatchBuilder(url);
+ }
+
/**
* Adds an entry containing an update (PUT) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.
@@ -151,22 +180,39 @@ public class BundleBuilder {
* @param theResource The resource to update
*/
public UpdateBuilder addTransactionUpdateEntry(IBaseResource theResource) {
+ Validate.notNull(theResource, "theResource must not be null");
+
+ IIdType id = theResource.getIdElement();
+ if (id.hasIdPart() && !id.hasResourceType()) {
+ String resourceType = myContext.getResourceType(theResource);
+ id = id.withResourceType(resourceType);
+ }
+
+ String requestUrl = id.toUnqualifiedVersionless().getValue();
+ String fullUrl = id.getValue();
+ String verb = "PUT";
+
+ IPrimitiveType<?> url = addAndPopulateTransactionBundleEntryRequest(theResource, fullUrl, requestUrl, verb);
+
+ return new UpdateBuilder(url);
+ }
+
+ @Nonnull
+ private IPrimitiveType<?> addAndPopulateTransactionBundleEntryRequest(IBaseResource theResource, String theFullUrl, String theRequestUrl, String theHttpVerb) {
setBundleField("type", "transaction");
- IBase request = addEntryAndReturnRequest(theResource);
+ IBase request = addEntryAndReturnRequest(theResource, theFullUrl);
// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
- String resourceType = myContext.getResourceType(theResource);
- url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
+ url.setValueAsString(theRequestUrl);
myEntryRequestUrlChild.getMutator().setValue(request, url);
- // Bundle.entry.request.url
+ // Bundle.entry.request.method
IPrimitiveType<?> method = (IPrimitiveType<?>) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments());
- method.setValueAsString("PUT");
+ method.setValueAsString(theHttpVerb);
myEntryRequestMethodChild.getMutator().setValue(request, method);
-
- return new UpdateBuilder(url);
+ return url;
}
/**
@@ -178,7 +224,7 @@ public class BundleBuilder {
public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
setBundleField("type", "transaction");
- IBase request = addEntryAndReturnRequest(theResource);
+ IBase request = addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
String resourceType = myContext.getResourceType(theResource);
@@ -198,15 +244,30 @@ public class BundleBuilder {
/**
* Adds an entry containing a delete (DELETE) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.
- *
+ *
* Note that the resource is only used to extract its ID and type, and the body of the resource is not included in the entry,
*
* @param theResource The resource to delete.
*/
- public void addTransactionDeleteEntry(IBaseResource theResource) {
+ public DeleteBuilder addTransactionDeleteEntry(IBaseResource theResource) {
String resourceType = myContext.getResourceType(theResource);
String idPart = theResource.getIdElement().toUnqualifiedVersionless().getIdPart();
- addTransactionDeleteEntry(resourceType, idPart);
+ return addTransactionDeleteEntry(resourceType, idPart);
+ }
+
+ /**
+ * Adds an entry containing a delete (DELETE) request.
+ * Also sets the Bundle.type value to "transaction" if it is not already set.
+ *
+ * Note that the resource is only used to extract its ID and type, and the body of the resource is not included in the entry.
+ *
+ * @param theResourceId The resource ID to delete.
+ * @return Returns a {@link DeleteBuilder} that can be used to further customize the entry
+ */
+ public DeleteBuilder addTransactionDeleteEntry(IIdType theResourceId) {
+ String resourceType = theResourceId.getResourceType();
+ String idPart = theResourceId.getIdPart();
+ return addTransactionDeleteEntry(resourceType, idPart);
}
/**
@@ -214,24 +275,45 @@ public class BundleBuilder {
* Also sets the Bundle.type value to "transaction" if it is not already set.
*
* @param theResourceType The type resource to delete.
- * @param theIdPart the ID of the resource to delete.
+ * @param theIdPart the ID of the resource to delete.
*/
- public void addTransactionDeleteEntry(String theResourceType, String theIdPart) {
+ public DeleteBuilder addTransactionDeleteEntry(String theResourceType, String theIdPart) {
setBundleField("type", "transaction");
- IBase request = addEntryAndReturnRequest();
IdDt idDt = new IdDt(theIdPart);
-
+
+ String deleteUrl = idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue();
+
+ return addDeleteEntry(deleteUrl);
+ }
+
+ /**
+ * Adds an entry containing a delete (DELETE) request.
+ * Also sets the Bundle.type value to "transaction" if it is not already set.
+ *
+ * @param theMatchUrl The match URL, e.g. Patient?identifier=http://foo|123
+ * @since 6.3.0
+ */
+ public BaseOperationBuilder addTransactionDeleteEntryConditional(String theMatchUrl) {
+ Validate.notBlank(theMatchUrl, "theMatchUrl must not be null or blank");
+ return addDeleteEntry(theMatchUrl);
+ }
+
+ @Nonnull
+ private DeleteBuilder addDeleteEntry(String theDeleteUrl) {
+ IBase request = addEntryAndReturnRequest();
+
// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
- url.setValueAsString(idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue());
+ url.setValueAsString(theDeleteUrl);
myEntryRequestUrlChild.getMutator().setValue(request, url);
// Bundle.entry.request.method
IPrimitiveType<?> method = (IPrimitiveType<?>) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments());
method.setValueAsString("DELETE");
myEntryRequestMethodChild.getMutator().setValue(request, method);
- }
+ return new DeleteBuilder();
+ }
/**
@@ -239,14 +321,13 @@ public class BundleBuilder {
*/
public void addCollectionEntry(IBaseResource theResource) {
setType("collection");
- addEntryAndReturnRequest(theResource);
+ addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
}
/**
* Creates new entry and adds it to the bundle
*
- * @return
- * Returns the new entry.
+ * @return Returns the new entry.
*/
public IBase addEntry() {
IBase entry = myEntryDef.newInstance();
@@ -258,8 +339,7 @@ public class BundleBuilder {
* Creates new search instance for the specified entry
*
* @param entry Entry to create search instance for
- * @return
- * Returns the search instance
+ * @return Returns the search instance
*/
public IBaseBackboneElement addSearch(IBase entry) {
IBase searchInstance = mySearchDef.newInstance();
@@ -267,19 +347,14 @@ public class BundleBuilder {
return (IBaseBackboneElement) searchInstance;
}
- /**
- *
- * @param theResource
- * @return
- */
- public IBase addEntryAndReturnRequest(IBaseResource theResource) {
+ private IBase addEntryAndReturnRequest(IBaseResource theResource, String theFullUrl) {
Validate.notNull(theResource, "theResource must not be null");
IBase entry = addEntry();
// Bundle.entry.fullUrl
IPrimitiveType<?> fullUrl = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
- fullUrl.setValueAsString(theResource.getIdElement().getValue());
+ fullUrl.setValueAsString(theFullUrl);
myEntryFullUrlChild.getMutator().setValue(entry, fullUrl);
// Bundle.entry.resource
@@ -306,6 +381,15 @@ public class BundleBuilder {
return myBundle;
}
+ /**
+ * Convenience method which auto-casts the results of {@link #getBundle()}
+ *
+ * @since 6.3.0
+ */
+ public <T extends IBaseBundle> T getBundleTyped() {
+ return (T) myBundle;
+ }
+
public BundleBuilder setMetaField(String theFieldName, IBase theFieldValue) {
BaseRuntimeChildDefinition.IMutator mutator = myMetaDef.getChildByName(theFieldName).getMutator();
mutator.setValue(myBundle.getMeta(), theFieldValue);
@@ -315,12 +399,9 @@ public class BundleBuilder {
/**
* Sets the specified entry field.
*
- * @param theEntry
- * The entry instance to set values on
- * @param theEntryChildName
- * The child field name of the entry instance to be set
- * @param theValue
- * The field value to set
+ * @param theEntry The entry instance to set values on
+ * @param theEntryChildName The child field name of the entry instance to be set
+ * @param theValue The field value to set
*/
public void addToEntry(IBase theEntry, String theEntryChildName, IBase theValue) {
addToBase(theEntry, theEntryChildName, theValue, myEntryDef);
@@ -329,12 +410,9 @@ public class BundleBuilder {
/**
* Sets the specified search field.
*
- * @param theSearch
- * The search instance to set values on
- * @param theSearchFieldName
- * The child field name of the search instance to be set
- * @param theSearchFieldValue
- * The field value to set
+ * @param theSearch The search instance to set values on
+ * @param theSearchFieldName The child field name of the search instance to be set
+ * @param theSearchFieldValue The field value to set
*/
public void addToSearch(IBase theSearch, String theSearchFieldName, IBase theSearchFieldValue) {
addToBase(theSearch, theSearchFieldName, theSearchFieldValue, mySearchDef);
@@ -349,12 +427,9 @@ public class BundleBuilder {
/**
* Creates a new primitive.
*
- * @param theTypeName
- * The element type for the primitive
- * @param <T>
- * Actual type of the parameterized primitive type interface
- * @return
- * Returns the new empty instance of the element definition.
+ * @param theTypeName The element type for the primitive
+ * @param <T> Actual type of the parameterized primitive type interface
+ * @return Returns the new empty instance of the element definition.
*/
public <T> IPrimitiveType<T> newPrimitive(String theTypeName) {
BaseRuntimeElementDefinition<?> primitiveDefinition = myContext.getElementDefinition(theTypeName);
@@ -365,14 +440,10 @@ public class BundleBuilder {
/**
* Creates a new primitive instance of the specified element type.
*
- * @param theTypeName
- * Element type to create
- * @param theInitialValue
- * Initial value to be set on the new instance
- * @param <T>
- * Actual type of the parameterized primitive type interface
- * @return
- * Returns the newly created instance
+ * @param theTypeName Element type to create
+ * @param theInitialValue Initial value to be set on the new instance
+ * @param <T> Actual type of the parameterized primitive type interface
+ * @return Returns the newly created instance
*/
public <T> IPrimitiveType<T> newPrimitive(String theTypeName, T theInitialValue) {
IPrimitiveType<T> retVal = newPrimitive(theTypeName);
@@ -389,38 +460,84 @@ public class BundleBuilder {
setBundleField("type", theType);
}
- public static class UpdateBuilder {
- private final IPrimitiveType<?> myUrl;
+ public class DeleteBuilder extends BaseOperationBuilder {
- public UpdateBuilder(IPrimitiveType<?> theUrl) {
- myUrl = theUrl;
- }
+ // nothing yet
- /**
- * Make this update a Conditional Update
- */
- public void conditional(String theConditionalUrl) {
- myUrl.setValueAsString(theConditionalUrl);
- }
}
- public class CreateBuilder {
+
+ public class PatchBuilder extends BaseOperationBuilderWithConditionalUrl<PatchBuilder> {
+
+ PatchBuilder(IPrimitiveType<?> theUrl) {
+ super(theUrl);
+ }
+
+ }
+
+ public class UpdateBuilder extends BaseOperationBuilderWithConditionalUrl<UpdateBuilder> {
+ UpdateBuilder(IPrimitiveType<?> theUrl) {
+ super(theUrl);
+ }
+
+ }
+
+ public class CreateBuilder extends BaseOperationBuilder {
private final IBase myRequest;
- public CreateBuilder(IBase theRequest) {
+ CreateBuilder(IBase theRequest) {
myRequest = theRequest;
}
/**
* Make this create a Conditional Create
*/
- public void conditional(String theConditionalUrl) {
+ public CreateBuilder conditional(String theConditionalUrl) {
BaseRuntimeElementDefinition<?> stringDefinition = Objects.requireNonNull(myContext.getElementDefinition("string"));
IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) stringDefinition.newInstance();
ifNoneExist.setValueAsString(theConditionalUrl);
myEntryRequestIfNoneExistChild.getMutator().setValue(myRequest, ifNoneExist);
+
+ return this;
+ }
+
+ }
+
+ public abstract class BaseOperationBuilder {
+
+ /**
+ * Returns a reference to the BundleBuilder instance.
+ *
+ * Calling this method has no effect at all; it is only
+ * provided for easy method chaining if you want to build
+ * your bundle as a single fluent call.
+ *
+ * @since 6.3.0
+ */
+ public BundleBuilder andThen() {
+ return BundleBuilder.this;
+ }
+
+
+ }
+
+ public abstract class BaseOperationBuilderWithConditionalUrl<T extends BaseOperationBuilder> extends BaseOperationBuilder {
+
+ private final IPrimitiveType<?> myUrl;
+
+ BaseOperationBuilderWithConditionalUrl(IPrimitiveType<?> theUrl) {
+ myUrl = theUrl;
+ }
+
+ /**
+ * Make this update a Conditional Update
+ */
+ @SuppressWarnings("unchecked")
+ public T conditional(String theConditionalUrl) {
+ myUrl.setValueAsString(theConditionalUrl);
+ return (T) this;
}
}
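As an editorial aside, here is a minimal hedged sketch of the fluent BundleBuilder API introduced above, assuming an R4 context; the resource IDs and match URL are illustrative only and the class name is made up.

```java
// Hedged usage sketch (not part of the patch): builds a transaction containing an
// update, a delete by ID, and a conditional delete using the new andThen() chaining.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;

public class BundleBuilderFluentSketch {

	public static IBaseBundle buildTransaction(FhirContext theContext) {
		Patient patient = new Patient();
		patient.setId("Patient/ABC");
		patient.setActive(true);

		BundleBuilder builder = new BundleBuilder(theContext);

		// Each operation builder exposes andThen(), so the whole transaction can be
		// assembled as a single fluent call.
		builder.addTransactionUpdateEntry(patient)
			.andThen()
			.addTransactionDeleteEntry(new IdType("Observation/123"))
			.andThen()
			.addTransactionDeleteEntryConditional("Encounter?identifier=http://foo|bar");

		return builder.getBundle();
	}
}
```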
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
index 06be9374ef8..3c03babc505 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
@@ -29,11 +29,13 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IBase;
+import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import javax.annotation.Nullable;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -53,8 +55,12 @@ public class OperationOutcomeUtil {
* @return Returns the newly added issue
*/
public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode) {
+ return addIssue(theCtx, theOperationOutcome, theSeverity, theDetails, theLocation, theCode, null, null, null);
+ }
+
+ public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode, @Nullable String theDetailSystem, @Nullable String theDetailCode, @Nullable String theDetailDescription) {
IBase issue = createIssue(theCtx, theOperationOutcome);
- populateDetails(theCtx, issue, theSeverity, theDetails, theLocation, theCode);
+ populateDetails(theCtx, issue, theSeverity, theDetails, theLocation, theCode, theDetailSystem, theDetailCode, theDetailDescription);
return issue;
}
@@ -127,17 +133,17 @@ public class OperationOutcomeUtil {
}
}
- private static void populateDetails(FhirContext theCtx, IBase theIssue, String theSeverity, String theDetails, String theLocation, String theCode) {
+ private static void populateDetails(FhirContext theCtx, IBase theIssue, String theSeverity, String theDetails, String theLocation, String theCode, String theDetailSystem, String theDetailCode, String theDetailDescription) {
BaseRuntimeElementCompositeDefinition<?> issueElement = (BaseRuntimeElementCompositeDefinition<?>) theCtx.getElementDefinition(theIssue.getClass());
- BaseRuntimeChildDefinition detailsChild;
- detailsChild = issueElement.getChildByName("diagnostics");
+ BaseRuntimeChildDefinition diagnosticsChild;
+ diagnosticsChild = issueElement.getChildByName("diagnostics");
BaseRuntimeChildDefinition codeChild = issueElement.getChildByName("code");
IPrimitiveType<?> codeElem = (IPrimitiveType<?>) codeChild.getChildByName("code").newInstance(codeChild.getInstanceConstructorArguments());
codeElem.setValueAsString(theCode);
codeChild.getMutator().addValue(theIssue, codeElem);
- BaseRuntimeElementDefinition<?> stringDef = detailsChild.getChildByName(detailsChild.getElementName());
+ BaseRuntimeElementDefinition<?> stringDef = diagnosticsChild.getChildByName(diagnosticsChild.getElementName());
BaseRuntimeChildDefinition severityChild = issueElement.getChildByName("severity");
IPrimitiveType<?> severityElem = (IPrimitiveType<?>) severityChild.getChildByName("severity").newInstance(severityChild.getInstanceConstructorArguments());
@@ -146,9 +152,27 @@ public class OperationOutcomeUtil {
IPrimitiveType<?> string = (IPrimitiveType<?>) stringDef.newInstance();
string.setValueAsString(theDetails);
- detailsChild.getMutator().setValue(theIssue, string);
+ diagnosticsChild.getMutator().setValue(theIssue, string);
addLocationToIssue(theCtx, theIssue, theLocation);
+
+ if (isNotBlank(theDetailSystem)) {
+ BaseRuntimeChildDefinition detailsChild = issueElement.getChildByName("details");
+ if (detailsChild != null) {
+ BaseRuntimeElementDefinition<?> codeableConceptDef = theCtx.getElementDefinition("CodeableConcept");
+ IBase codeableConcept = codeableConceptDef.newInstance();
+
+ BaseRuntimeElementDefinition<?> codingDef = theCtx.getElementDefinition("Coding");
+ IBaseCoding coding = (IBaseCoding) codingDef.newInstance();
+ coding.setSystem(theDetailSystem);
+ coding.setCode(theDetailCode);
+ coding.setDisplay(theDetailDescription);
+
+ codeableConceptDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);
+
+ detailsChild.getMutator().addValue(theIssue, codeableConcept);
+ }
+ }
}
public static void addLocationToIssue(FhirContext theContext, IBase theIssue, String theLocation) {
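For context, a hedged sketch (not part of the patch) of how the new addIssue() overload above can be combined with StorageResponseCodeEnum to attach a machine-readable detail coding; the severity, code, and message strings are illustrative and the class name is made up.

```java
// Hedged sketch: populates OperationOutcome.issue.details.coding via the three
// trailing arguments of the new addIssue() overload. Not part of the patch itself.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;

public class StorageResponseOutcomeSketch {

	public static IBaseOperationOutcome buildOutcome(FhirContext theContext) {
		IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(theContext);
		StorageResponseCodeEnum detail = StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE;

		OperationOutcomeUtil.addIssue(
			theContext,
			oo,
			"information",                     // issue.severity
			"Successfully updated resource \"Patient/123\" with no changes detected.",
			null,                              // no location
			"informational",                   // issue.code
			StorageResponseCodeEnum.SYSTEM,    // details.coding.system
			detail.getCode(),                  // details.coding.code
			detail.getDisplay());              // details.coding.display

		return oo;
	}
}
```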
diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
index fa7558e5fb0..966e3fef2ad 100644
--- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
+++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
@@ -99,10 +99,22 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidParameterChain=Invalid parameter chain
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidVersion=Version "{0}" is not valid for resource {1}
ca.uhn.fhir.jpa.dao.BaseStorageDao.multipleParamsWithSameNameOneIsMissingTrue=This server does not know how to handle multiple "{0}" parameters where one has a value of :missing=true
ca.uhn.fhir.jpa.dao.BaseStorageDao.missingBody=No body was supplied in request
-ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Deletion failed.
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}" in {1}ms
-ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
+ca.uhn.fhir.jpa.dao.BaseStorageDao.unableToDeleteNotFound=Unable to find resource matching URL "{0}". Nothing has been deleted.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreate=Successfully created resource "{0}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreateConditionalNoMatch=Successfully conditionally created resource "{0}". No existing resources matched URL "{1}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulCreateConditionalWithMatch=Successfully conditionally created resource "{0}". Existing resource matched URL "{1}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatch=Successfully patched resource "{0}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchNoChange=Successfully patched resource "{0}" with no changes detected.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchConditional=Successfully conditionally patched resource. Existing resource {0} matched URL: {1}.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulPatchConditionalNoChange=Successfully conditionally patched resource with no changes detected. Existing resource {0} matched URL: {1}.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdate=Successfully updated resource "{0}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateNoChange=Successfully updated resource "{0}" with no changes detected.
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateAsCreate=Successfully created resource "{0}" using update as create (i.e. create with a client-assigned ID).
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalWithMatch=Successfully conditionally updated resource "{0}". Existing resource matched URL "{1}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalNoMatch=Successfully conditionally updated resource "{0}". Created resource because no existing resource matched URL "{1}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulUpdateConditionalNoChangeWithMatch=Successfully conditionally updated resource "{0}" with no changes detected. Existing resource matched URL "{1}".
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulDeletes=Successfully deleted {0} resource(s).
+ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulTimingSuffix=Took {0}ms.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceNotExisting=Not deleted, resource {0} does not exist.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceAlreadyDeleted=Not deleted, resource {0} was already deleted.
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 56b5d5fd7c0..825e4431039 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index a9a5cecf5e8..b986d1077f1 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index b7730bbc885..70f2caf3980 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index 0ed4c30293e..62b42c5e86b 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
index 6a108f56b8b..1c624dadc7c 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index efc6dd0427f..1a3e2fe756d 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index c8ba145dea2..590d627faab 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index 599deda15a2..60c6713c747 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index a173b06c4c7..ebb58ce95dc 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index e30f11dc432..f88a1cd4aa9 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index f9b1202c936..adffe9d0272 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java
index 46998c941f0..39d9a3a3495 100644
--- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java
+++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java
@@ -26,6 +26,11 @@ import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.BooleanType;
+import org.hl7.fhir.r4.model.CodeType;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import java.math.BigDecimal;
@@ -109,6 +114,53 @@ public class BundleBuilderExamples {
//END SNIPPET: createConditional
}
+ public void patch() throws FHIRException {
+ //START SNIPPET: patch
+
+ // Create a FHIR Patch object
+ Parameters patch = new Parameters();
+ Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
+ op.addPart().setName("type").setValue(new CodeType("replace"));
+ op.addPart().setName("path").setValue(new CodeType("Patient.active"));
+ op.addPart().setName("value").setValue(new BooleanType(false));
+
+ // Create a TransactionBuilder
+ BundleBuilder builder = new BundleBuilder(myFhirContext);
+
+ // Create a target object (this is the ID of the resource that will be patched)
+ IIdType targetId = new IdType("Patient/123");
+
+ // Add the patch to the bundle
+ builder.addTransactionFhirPatchEntry(targetId, patch);
+
+ // Execute the transaction
+ IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute();
+ //END SNIPPET: patch
+ }
+
+ public void patchConditional() throws FHIRException {
+ //START SNIPPET: patchConditional
+
+ // Create a FHIR Patch object
+ Parameters patch = new Parameters();
+ Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
+ op.addPart().setName("type").setValue(new CodeType("replace"));
+ op.addPart().setName("path").setValue(new CodeType("Patient.active"));
+ op.addPart().setName("value").setValue(new BooleanType(false));
+
+ // Create a TransactionBuilder
+ BundleBuilder builder = new BundleBuilder(myFhirContext);
+
+ // Add the patch to the bundle with a conditional URL
+ String conditionalUrl = "Patient?identifier=http://foo|123";
+ builder.addTransactionFhirPatchEntry(patch).conditional(conditionalUrl);
+
+
+ // Execute the transaction
+ IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute();
+ //END SNIPPET: patchConditional
+ }
+
public void customizeBundle() throws FHIRException {
//START SNIPPET: customizeBundle
// Create a TransactionBuilder
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-add-conditional-delete-to-bundlebuilder.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-add-conditional-delete-to-bundlebuilder.yaml
new file mode 100644
index 00000000000..bf4907aab9b
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-add-conditional-delete-to-bundlebuilder.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 4293
+title: "The BundleBuilder now supports adding conditional
+ DELETE operations, PATCH operations, and conditional PATCH
+ operations to a transaction bundle."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-improved-operationoutcome-for-jpa-cud.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-improved-operationoutcome-for-jpa-cud.yaml
new file mode 100644
index 00000000000..38ff5ad4a91
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-improved-operationoutcome-for-jpa-cud.yaml
@@ -0,0 +1,11 @@
+---
+type: add
+issue: 4293
+title: "When performing create/update/patch/delete operations against the JPA server, the response
+ OperationOutcome will now include additional details about the outcome of the operation. This
+ includes:
+
+ - For updates, the message will indicate that the update did not contain any changes (i.e. a no-op)
+ - For conditional creates/updates/deletes, the message will indicate whether the conditional URL matched any existing resources and the outcome of the operation.
+ - A new coding has been added to the OperationOutcome.issue.details.coding containing a machine processable equivalent to the outcome.
+ "
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-include-carried-tags-in-transaction-response.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-include-carried-tags-in-transaction-response.yaml
new file mode 100644
index 00000000000..469d71838b5
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_3_0/4293-include-carried-tags-in-transaction-response.yaml
@@ -0,0 +1,7 @@
+---
+type: add
+issue: 4293
+title: "When updating resources using a FHIR transaction in the JPA server, if the
+ client instructs the server to include the resource body in the response, any
+ tags that have been carried forward from previous versions of the resource are
+ now included in the response."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/bundle_builder.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/bundle_builder.md
index 675c6308c3e..40156f9f4ce 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/bundle_builder.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/model/bundle_builder.md
@@ -36,7 +36,23 @@ If you want to perform a conditional update:
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|updateConditional}}
```
-# Customizing bundle
+# Transaction Patch
+
+To add a PATCH operation to a transaction bundle:
+
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|patch}}
+```
+
+## Conditional Patch
+
+If you want to perform a conditional patch:
+
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java|patchConditional}}
+```
+
+# Customizing the Bundle
If you want to manipulate a bundle:
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index e839b35abb7..b5add09df5c 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 3cbf7c8c81d..2e20067fa7f 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index 367298467b4..19277e66e95 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java
index c0cd943a143..90f5f512974 100644
--- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java
+++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java
@@ -39,14 +39,16 @@ public class SqlQuery {
private final StackTraceElement[] myStackTrace;
private final int mySize;
private final LanguageEnum myLanguage;
+ private final String myNamespace;
public SqlQuery(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize) {
- this(theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL);
+ this(null, theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL);
}
- public SqlQuery(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage) {
+ public SqlQuery(String theNamespace, String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage) {
Validate.notNull(theLanguage, "theLanguage must not be null");
+ myNamespace = theNamespace;
mySql = theSql;
myParams = Collections.unmodifiableList(theParams);
myQueryTimestamp = theQueryTimestamp;
@@ -56,6 +58,10 @@ public class SqlQuery {
myLanguage = theLanguage;
}
+ public String getNamespace() {
+ return myNamespace;
+ }
+
public long getQueryTimestamp() {
return myQueryTimestamp;
}
@@ -118,6 +124,10 @@ public class SqlQuery {
return mySize;
}
+ @Override
+ public String toString() {
+ return getSql(true, true);
+ }
public enum LanguageEnum {
@@ -125,9 +135,4 @@ public class SqlQuery {
JSON
}
-
- @Override
- public String toString() {
- return getSql(true, true);
- }
}
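A brief editorial sketch (not part of the patch) showing the new namespace-aware SqlQuery constructor and accessor; all values, including the namespace string, are illustrative and the class name is made up.

```java
// Hedged sketch: constructs an SqlQuery with the new leading namespace argument
// and reads it back. toString() delegates to getSql(true, true) per the change above.
import ca.uhn.fhir.jpa.util.SqlQuery;

import java.util.Collections;

public class SqlQueryNamespaceSketch {

	public static void main(String[] args) {
		SqlQuery query = new SqlQuery(
			"Patient.search",                                   // theNamespace (new parameter)
			"SELECT RES_ID FROM HFJ_RESOURCE WHERE RES_ID = ?", // theSql
			Collections.singletonList("123"),                   // theParams
			System.currentTimeMillis(),                         // theQueryTimestamp
			5,                                                  // theElapsedTime (ms)
			Thread.currentThread().getStackTrace(),             // theStackTraceElements
			1000,                                               // theSize
			SqlQuery.LanguageEnum.SQL);                         // theLanguage

		System.out.println(query.getNamespace()); // prints "Patient.search"
		System.out.println(query);                // prints the formatted SQL
	}
}
```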
diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
index 371728b9f6d..5ad9410b772 100644
--- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
+++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java
@@ -384,7 +384,7 @@ public class TestUtil {
}
public static void sleepOneClick() {
- ca.uhn.fhir.util.TestUtil.sleepAtLeast(1);
+ ca.uhn.fhir.util.TestUtil.sleepAtLeast(1, false);
}
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 41eacea7e0f..2fff7a3da69 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
index 9b1b5f64af6..de8adf4362c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
@@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSqlBuilder;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSvcImpl;
import ca.uhn.fhir.jpa.reindex.Batch2DaoSvcImpl;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import javax.persistence.EntityManager;
@@ -43,7 +44,7 @@ public class Batch2SupportConfig {
}
@Bean
- public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, IFulltextSearchSvc theFullTextSearchSvc) {
+ public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) {
return new DeleteExpungeSvcImpl(theEntityManager, theDeleteExpungeSqlBuilder, theFullTextSearchSvc);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index 207f69f94a5..853f2ae507e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -25,7 +25,9 @@ import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@@ -230,7 +232,6 @@ public class JpaConfig {
return new ResponseTerminologyTranslationInterceptor(theValidationSupport, theResponseTerminologyTranslationSvc);
}
- @Lazy
@Bean
public ResponseTerminologyTranslationSvc responseTerminologyTranslationSvc(IValidationSupport theValidationSupport) {
return new ResponseTerminologyTranslationSvc(theValidationSupport);
@@ -265,6 +266,11 @@ public class JpaConfig {
return new ValueSetOperationProvider();
}
+ @Bean
+ public IJpaStorageResourceParser jpaStorageResourceParser() {
+ return new JpaStorageResourceParser();
+ }
+
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 4d307a949dd..7dd6a83f366 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -16,6 +16,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IJpaDao;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
@@ -29,8 +30,6 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
-import ca.uhn.fhir.jpa.entity.Search;
-import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
@@ -45,11 +44,9 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
-import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
@@ -60,6 +57,7 @@ import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
@@ -71,14 +69,12 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
-import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
@@ -86,6 +82,8 @@ import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.MetaUtil;
+import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@@ -148,11 +146,9 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
-import java.util.UUID;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME;
-import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -181,9 +177,18 @@ import static org.apache.commons.lang3.StringUtils.trim;
* #L%
*/
+
+/**
+ * TODO: JA - This class has only one subclass now. Historically it was a common
+ * ancestor for BaseHapiFhirSystemDao and BaseHapiFhirResourceDao but I've untangled
+ * the former from this hierarchy in order to simplify moving common functionality
+ * for resource DAOs into the hapi-fhir-storage project. This class should be merged
+ * into BaseHapiFhirResourceDao, but that should be done in its own dedicated PR
+ * since it'll be a noisy change.
+ */
@SuppressWarnings("WeakerAccess")
@Repository
-public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageDao implements IDao, IJpaDao<T>, ApplicationContextAware {
+public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageResourceDao<T> implements IDao, IJpaDao<T>, ApplicationContextAware {
public static final long INDEX_STATUS_INDEXED = 1L;
public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
@@ -233,8 +238,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
- private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
- @Autowired
private IPartitionLookupSvc myPartitionLookupSvc;
@Autowired
private MemoryCacheService myMemoryCacheService;
@@ -243,6 +246,8 @@ public abstract class BaseHapiFhirDao extends BaseStora
@Autowired
private PlatformTransactionManager myTransactionManager;
+ @Autowired
+ protected IJpaStorageResourceParser myJpaStorageResourceParser;
@VisibleForTesting
public void setSearchParamPresenceSvc(ISearchParamPresenceSvc theSearchParamPresenceSvc) {
@@ -371,14 +376,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
myContext = theContext;
}
- public FhirContext getContext(FhirVersionEnum theVersion) {
- Validate.notNull(theVersion, "theVersion must not be null");
- if (theVersion == myFhirContext.getVersion().getVersion()) {
- return myFhirContext;
- }
- return FhirContext.forCached(theVersion);
- }
-
/**
* null will only be returned if the scheme and tag are both blank
*/
@@ -513,27 +510,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
return retVal;
}
- protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
- return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
- }
-
- protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
- String resourceName = defaultIfBlank(theResourceType, null);
-
- Search search = new Search();
- search.setOffset(theOffset);
- search.setDeleted(false);
- search.setCreated(new Date());
- search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
- search.setUuid(UUID.randomUUID().toString());
- search.setResourceType(resourceName);
- search.setResourceId(theResourcePid);
- search.setSearchType(SearchTypeEnum.HISTORY);
- search.setStatus(SearchStatusEnum.FINISHED);
- search.setHistorySearchStyle(searchParameterType);
-
- return myPersistedJpaBundleProviderFactory.newInstance(theRequest, search);
- }
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
String newVersion;
@@ -796,133 +772,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
return !allTagsOld.equals(allTagsNew);
}
- @SuppressWarnings("unchecked")
- private <R extends IResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
- R retVal = (R) res;
- if (theEntity.getDeleted() != null) {
- res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
- retVal = (R) res;
- ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
- if (theForHistoryOperation) {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
- }
- } else if (theForHistoryOperation) {
- /*
- * If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
- */
- Date published = theEntity.getPublished().getValue();
- Date updated = theEntity.getUpdated().getValue();
- if (published.equals(updated)) {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.POST);
- } else {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.PUT);
- }
- }
-
- res.setId(theEntity.getIdDt().withVersion(theVersion.toString()));
-
- ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
- ResourceMetadataKeyEnum.PUBLISHED.put(res, theEntity.getPublished());
- ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
- IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
-
- if (theTagList != null) {
- if (theEntity.isHasTags()) {
- TagList tagList = new TagList();
- List<IBaseCoding> securityLabels = new ArrayList<>();
- List<IdDt> profiles = new ArrayList<>();
- for (BaseTag next : theTagList) {
- switch (next.getTag().getTagType()) {
- case PROFILE:
- profiles.add(new IdDt(next.getTag().getCode()));
- break;
- case SECURITY_LABEL:
- IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
- secLabel.setSystem(next.getTag().getSystem());
- secLabel.setCode(next.getTag().getCode());
- secLabel.setDisplay(next.getTag().getDisplay());
- securityLabels.add(secLabel);
- break;
- case TAG:
- tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
- break;
- }
- }
- if (tagList.size() > 0) {
- ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
- }
- if (securityLabels.size() > 0) {
- ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
- }
- if (profiles.size() > 0) {
- ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
- }
- }
- }
-
- return retVal;
- }
-
- @SuppressWarnings("unchecked")
- private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
- R retVal = (R) res;
- if (theEntity.getDeleted() != null) {
- res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
- retVal = (R) res;
- ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
- if (theForHistoryOperation) {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.DELETE.toCode());
- }
- } else if (theForHistoryOperation) {
- /*
- * If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
- */
- Date published = theEntity.getPublished().getValue();
- Date updated = theEntity.getUpdated().getValue();
- if (published.equals(updated)) {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.POST.toCode());
- } else {
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, HTTPVerb.PUT.toCode());
- }
- }
-
- res.getMeta().setLastUpdated(null);
- res.getMeta().setVersionId(null);
-
- updateResourceMetadata(theEntity, res);
- res.setId(res.getIdElement().withVersion(theVersion.toString()));
-
- res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
- IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
-
- if (theTagList != null) {
- res.getMeta().getTag().clear();
- res.getMeta().getProfile().clear();
- res.getMeta().getSecurity().clear();
- for (BaseTag next : theTagList) {
- switch (next.getTag().getTagType()) {
- case PROFILE:
- res.getMeta().addProfile(next.getTag().getCode());
- break;
- case SECURITY_LABEL:
- IBaseCoding sec = res.getMeta().addSecurity();
- sec.setSystem(next.getTag().getSystem());
- sec.setCode(next.getTag().getCode());
- sec.setDisplay(next.getTag().getDisplay());
- break;
- case TAG:
- IBaseCoding tag = res.getMeta().addTag();
- tag.setSystem(next.getTag().getSystem());
- tag.setCode(next.getTag().getCode());
- tag.setDisplay(next.getTag().getDisplay());
- break;
- }
- }
- }
-
- return retVal;
- }
-
/**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been updated in the database
*
@@ -954,6 +803,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
// nothing
}
+ @Override
@CoverageIgnore
public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) {
throw new NotImplementedException(Msg.code(927) + "");
@@ -1005,220 +855,8 @@ public abstract class BaseHapiFhirDao extends BaseStora
return metaSnapshotModeTokens.contains(theTag.getTag().getTagType());
}
- @Override
- public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
- RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
- Class<? extends IBaseResource> resourceType = type.getImplementingClass();
- return toResource(resourceType, theEntity, null, theForHistoryOperation);
- }
- @SuppressWarnings("unchecked")
- @Override
- public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
- // 1. get resource, it's encoding and the tags if any
- byte[] resourceBytes;
- String resourceText;
- ResourceEncodingEnum resourceEncoding;
- @Nullable
- Collection<? extends BaseTag> tagList = Collections.emptyList();
- long version;
- String provenanceSourceUri = null;
- String provenanceRequestId = null;
-
- if (theEntity instanceof ResourceHistoryTable) {
- ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
- resourceBytes = history.getResource();
- resourceText = history.getResourceTextVc();
- resourceEncoding = history.getEncoding();
- switch (getConfig().getTagStorageMode()) {
- case VERSIONED:
- default:
- if (history.isHasTags()) {
- tagList = history.getTags();
- }
- break;
- case NON_VERSIONED:
- if (history.getResourceTable().isHasTags()) {
- tagList = history.getResourceTable().getTags();
- }
- break;
- case INLINE:
- tagList = null;
- }
- version = history.getVersion();
- if (history.getProvenance() != null) {
- provenanceRequestId = history.getProvenance().getRequestId();
- provenanceSourceUri = history.getProvenance().getSourceUri();
- }
- } else if (theEntity instanceof ResourceTable) {
- ResourceTable resource = (ResourceTable) theEntity;
- ResourceHistoryTable history;
- if (resource.getCurrentVersionEntity() != null) {
- history = resource.getCurrentVersionEntity();
- } else {
- version = theEntity.getVersion();
- history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
- ((ResourceTable) theEntity).setCurrentVersionEntity(history);
-
- while (history == null) {
- if (version > 1L) {
- version--;
- history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
- } else {
- return null;
- }
- }
- }
-
- resourceBytes = history.getResource();
- resourceEncoding = history.getEncoding();
- resourceText = history.getResourceTextVc();
- switch (getConfig().getTagStorageMode()) {
- case VERSIONED:
- case NON_VERSIONED:
- if (resource.isHasTags()) {
- tagList = resource.getTags();
- } else {
- tagList = Collections.emptyList();
- }
- break;
- case INLINE:
- tagList = null;
- break;
- }
- version = history.getVersion();
- if (history.getProvenance() != null) {
- provenanceRequestId = history.getProvenance().getRequestId();
- provenanceSourceUri = history.getProvenance().getSourceUri();
- }
- } else if (theEntity instanceof ResourceSearchView) {
- // This is the search View
- ResourceSearchView view = (ResourceSearchView) theEntity;
- resourceBytes = view.getResource();
- resourceText = view.getResourceTextVc();
- resourceEncoding = view.getEncoding();
- version = view.getVersion();
- provenanceRequestId = view.getProvenanceRequestId();
- provenanceSourceUri = view.getProvenanceSourceUri();
- switch (getConfig().getTagStorageMode()) {
- case VERSIONED:
- case NON_VERSIONED:
- if (theTagList != null) {
- tagList = theTagList;
- } else {
- tagList = Collections.emptyList();
- }
- break;
- case INLINE:
- tagList = null;
- break;
- }
- } else {
- // something wrong
- return null;
- }
-
- // 2. get The text
- String decodedResourceText;
- if (resourceText != null) {
- decodedResourceText = resourceText;
- } else {
- decodedResourceText = decodeResource(resourceBytes, resourceEncoding);
- }
-
- // 3. Use the appropriate custom type if one is specified in the context
- Class<R> resourceType = theResourceType;
- if (tagList != null) {
- if (myContext.hasDefaultTypeForProfile()) {
- for (BaseTag nextTag : tagList) {
- if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
- String profile = nextTag.getTag().getCode();
- if (isNotBlank(profile)) {
- Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
- if (newType != null && theResourceType.isAssignableFrom(newType)) {
- ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
- resourceType = (Class<R>) newType;
- break;
- }
- }
- }
- }
- }
- }
-
- // 4. parse the text to FHIR
- R retVal;
- if (resourceEncoding != ResourceEncodingEnum.DEL) {
-
- LenientErrorHandler errorHandler = new LenientErrorHandler(false).setErrorOnInvalidValue(false);
- IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), errorHandler, theEntity.getId());
-
- try {
- retVal = parser.parseResource(resourceType, decodedResourceText);
- } catch (Exception e) {
- StringBuilder b = new StringBuilder();
- b.append("Failed to parse database resource[");
- b.append(myFhirContext.getResourceType(resourceType));
- b.append("/");
- b.append(theEntity.getIdDt().getIdPart());
- b.append(" (pid ");
- b.append(theEntity.getId());
- b.append(", version ");
- b.append(theEntity.getFhirVersion().name());
- b.append("): ");
- b.append(e.getMessage());
- String msg = b.toString();
- ourLog.error(msg, e);
- throw new DataFormatException(Msg.code(928) + msg, e);
- }
-
- } else {
-
- retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
-
- }
-
- // 5. fill MetaData
- retVal = populateResourceMetadata(theEntity, theForHistoryOperation, tagList, version, resourceType, retVal);
-
- // 6. Handle source (provenance)
- if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
- String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
- + (isNotBlank(provenanceRequestId) ? "#" : "")
- + defaultString(provenanceRequestId);
-
- MetaUtil.setSource(myContext, retVal, sourceString);
- }
-
- // 7. Add partition information
- if (myPartitionSettings.isPartitioningEnabled()) {
- PartitionablePartitionId partitionId = theEntity.getPartitionId();
- if (partitionId != null && partitionId.getPartitionId() != null) {
- PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId());
- retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId());
- } else {
- retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null);
- }
- }
-
- return retVal;
- }
-
- protected <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntity, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, Class<R> theResourceType, R theResource) {
- if (theResource instanceof IResource) {
- IResource res = (IResource) theResource;
- theResource = populateResourceMetadataHapi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
- } else {
- IAnyResource res = (IAnyResource) theResource;
- theResource = populateResourceMetadataRi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
- }
- return theResource;
- }
-
- public String toResourceName(Class<? extends IBaseResource> theResourceType) {
- return myContext.getResourceType(theResourceType);
- }
String toResourceName(IBaseResource theResource) {
return myContext.getResourceType(theResource);
@@ -1375,7 +1013,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue());
if (theResource != null) {
- updateResourceMetadata(entity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
}
entity.setUnchangedInCurrentOperation(true);
return entity;
@@ -1475,7 +1113,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
}
if (theResource != null) {
- updateResourceMetadata(entity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
}
@@ -1498,7 +1136,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
if (getConfig().isMassIngestionMode()) {
oldResource = null;
} else {
- oldResource = toResource(entity, false);
+ oldResource = myJpaStorageResourceParser.toResource(entity, false);
}
notifyInterceptors(theRequest, theResource, oldResource, theTransactionDetails, true);
@@ -1510,7 +1148,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
historyEntity = ((ResourceTable) readEntity(theResourceId, theRequest)).getCurrentVersionEntity();
// Update version/lastUpdated so that interceptors see the correct version
- updateResourceMetadata(savedEntity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(savedEntity, theResource);
// Populate the PID in the resource, so it is available to hooks
addPidToResource(savedEntity, theResource);
@@ -1537,7 +1175,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
if (!changed && myConfig.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue());
- updateResourceMetadata(historyEntity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
return historyEntity;
}
@@ -1556,7 +1194,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
historyEntity.setResourceTextVc(encodedResource.getResourceText());
myResourceHistoryTableDao.save(historyEntity);
- updateResourceMetadata(historyEntity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
return historyEntity;
}
@@ -1586,14 +1224,15 @@ public abstract class BaseHapiFhirDao extends BaseStora
/**
* TODO eventually consider refactoring this to be part of an interceptor.
- *
+ *
* Throws an exception if the partition of the request, and the partition of the existing entity do not match.
+ *
* @param theRequest the request.
- * @param entity the existing entity.
+ * @param entity the existing entity.
*/
private void failIfPartitionMismatch(RequestDetails theRequest, ResourceTable entity) {
if (myPartitionSettings.isPartitioningEnabled() && theRequest != null && theRequest.getTenantId() != null && entity.getPartitionId() != null &&
- theRequest.getTenantId() != ALL_PARTITIONS_NAME) {
+ !ALL_PARTITIONS_NAME.equals(theRequest.getTenantId())) {
PartitionEntity partitionEntity = myPartitionLookupSvc.getPartitionByName(theRequest.getTenantId());
//partitionEntity should never be null
if (partitionEntity != null && !partitionEntity.getId().equals(entity.getPartitionId().getPartitionId())) {
@@ -1668,8 +1307,8 @@ public abstract class BaseHapiFhirDao extends BaseStora
}
@Override
- public ResourceTable updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion,
- IBasePersistedResource theEntity, IIdType theResourceId, IBaseResource theOldResource, TransactionDetails theTransactionDetails) {
+ public DaoMethodOutcome updateInternal(RequestDetails theRequestDetails, T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion,
+ IBasePersistedResource theEntity, IIdType theResourceId, @Nullable IBaseResource theOldResource, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {
ResourceTable entity = (ResourceTable) theEntity;
@@ -1696,7 +1335,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
}
// Update version/lastUpdated so that interceptors see the correct version
- updateResourceMetadata(savedEntity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(savedEntity, theResource);
// Populate the PID in the resource so it is available to hooks
addPidToResource(savedEntity, theResource);
@@ -1706,7 +1345,42 @@ public abstract class BaseHapiFhirDao extends BaseStora
notifyInterceptors(theRequestDetails, theResource, theOldResource, theTransactionDetails, false);
}
- return savedEntity;
+ Collection<? extends BaseTag> tagList = Collections.emptyList();
+ if (entity.isHasTags()) {
+ tagList = entity.getTags();
+ }
+ long version = entity.getVersion();
+ myJpaStorageResourceParser.populateResourceMetadata(entity, false, tagList, version, theResource);
+
+ boolean wasDeleted = false;
+ // NB If this if-else ever gets collapsed, make sure to account for possible null (will happen in mass-ingestion mode)
+ if (theOldResource instanceof IResource) {
+ wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) theOldResource) != null;
+ } else if (theOldResource instanceof IAnyResource) {
+ wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) theOldResource) != null;
+ }
+
+ DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted);
+
+ if (!thePerformIndexing) {
+ IIdType id = getContext().getVersion().newIdType();
+ id.setValue(entity.getIdDt().getValue());
+ outcome.setId(id);
+ }
+
+ // Only include a task timer if we're not in a sub-request (i.e. a transaction)
+ // since individual item times don't actually make much sense in the context
+ // of a transaction
+ StopWatch w = null;
+ if (theRequestDetails != null && !theRequestDetails.isSubRequest()) {
+ if (theTransactionDetails != null && !theTransactionDetails.isFhirTransaction()) {
+ w = new StopWatch(theTransactionDetails.getTransactionDate());
+ }
+ }
+
+ populateOperationOutcomeForUpdate(w, outcome, theMatchUrl, outcome.getOperationType());
+
+ return outcome;
}
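
The wasDeleted check above is deliberately an instanceof chain: the DSTU2 structures (IResource) and the RI structures (IAnyResource) expose DELETED_AT through different overloads, and theOldResource may be null in mass-ingestion mode. A minimal sketch of an equivalent helper, using only calls already present in this hunk (the method name is hypothetical, not part of this patch):

    private static boolean wasPreviouslyDeleted(@Nullable IBaseResource theOldResource) {
        // null occurs in mass-ingestion mode, where the previous version is not loaded
        if (theOldResource instanceof IResource) {
            return ResourceMetadataKeyEnum.DELETED_AT.get((IResource) theOldResource) != null;
        }
        if (theOldResource instanceof IAnyResource) {
            return ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) theOldResource) != null;
        }
        return false;
    }
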
private void notifyInterceptors(RequestDetails theRequestDetails, T theResource, IBaseResource theOldResource, TransactionDetails theTransactionDetails, boolean isUnchanged) {
@@ -1735,26 +1409,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
}
}
- protected void updateResourceMetadata(IBaseResourceEntity theEntity, IBaseResource theResource) {
- IIdType id = theEntity.getIdDt();
- if (getContext().getVersion().getVersion().isRi()) {
- id = getContext().getVersion().newIdType().setValue(id.getValue());
- }
- if (id.hasResourceType() == false) {
- id = id.withResourceType(theEntity.getResourceType());
- }
-
- theResource.setId(id);
- if (theResource instanceof IResource) {
- ResourceMetadataKeyEnum.VERSION.put((IResource) theResource, id.getVersionIdPart());
- ResourceMetadataKeyEnum.UPDATED.put((IResource) theResource, theEntity.getUpdated());
- } else {
- IBaseMetaType meta = theResource.getMeta();
- meta.setVersionId(id.getVersionIdPart());
- meta.setLastUpdated(theEntity.getUpdatedDate());
- }
- }
private void validateChildReferenceTargetTypes(IBase theElement, String thePath) {
if (theElement == null) {
@@ -1896,6 +1551,22 @@ public abstract class BaseHapiFhirDao extends BaseStora
myPartitionSettings = thePartitionSettings;
}
+ private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
+
+ private final TagDefinition myTagDefinition;
+ private final MemoryCacheService.TagDefinitionCacheKey myKey;
+
+ public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
+ myTagDefinition = theTagDefinition;
+ myKey = theKey;
+ }
+
+ @Override
+ public void afterCommit() {
+ myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
+ }
+ }
+
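
A synchronization like the one above only takes effect once it is registered with Spring's TransactionSynchronizationManager, so the cache write happens strictly after a successful commit. A brief sketch of that registration (the wrapping method is an assumption; the Spring API is the standard one from org.springframework.transaction.support):

    private void cacheTagDefinitionAfterCommit(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
        if (TransactionSynchronizationManager.isSynchronizationActive()) {
            // Defer the cache write until the surrounding transaction commits
            TransactionSynchronizationManager.registerSynchronization(
                new AddTagDefinitionToCacheAfterCommitSynchronization(theKey, theTagDefinition));
        } else {
            // No active transaction: safe to cache immediately
            myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, theKey, theTagDefinition);
        }
    }
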
@Nonnull
public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(TagTypeEnum theTagType, String theScheme, String theTerm) {
return new MemoryCacheService.TagDefinitionCacheKey(theTagType, theScheme, theTerm);
@@ -1999,34 +1670,12 @@ public abstract class BaseHapiFhirDao extends BaseStora
ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest;
}
- private static List<BaseCodingDt> toBaseCodingList(List<IBaseCoding> theSecurityLabels) {
- ArrayList<BaseCodingDt> retVal = new ArrayList<>(theSecurityLabels.size());
- for (IBaseCoding next : theSecurityLabels) {
- retVal.add((BaseCodingDt) next);
- }
- return retVal;
- }
-
- public static void validateResourceType(BaseHasResource theEntity, String theResourceName) {
- if (!theResourceName.equals(theEntity.getResourceType())) {
- throw new ResourceNotFoundException(Msg.code(935) + "Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType());
- }
- }
-
- private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
-
- private final TagDefinition myTagDefinition;
- private final MemoryCacheService.TagDefinitionCacheKey myKey;
-
- public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
- myTagDefinition = theTagDefinition;
- myKey = theKey;
- }
-
- @Override
- public void afterCommit() {
- myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
- }
+ /**
+ * Do not call this method outside of unit tests
+ */
+ @VisibleForTesting
+ public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) {
+ myJpaStorageResourceParser = theJpaStorageResourceParser;
}
}
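
The @VisibleForTesting setter above lets a unit test swap in a stub parser without bootstrapping the Spring context. A hedged sketch of how a test might use it (Mockito, Patient and myPatientDao are illustrative names, not part of this patch):

    IJpaStorageResourceParser parser = Mockito.mock(IJpaStorageResourceParser.class);
    // Stub the entity-to-resource conversion so the DAO under test never touches the database row contents
    Mockito.when(parser.toResource(Mockito.any(), Mockito.anyBoolean())).thenReturn(new Patient());
    myPatientDao.setJpaStorageResourceParserForUnitTest(parser);
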
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index afda3ead6f4..ae584a2d5a6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -57,26 +57,23 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
-import ca.uhn.fhir.jpa.patch.FhirPatch;
-import ca.uhn.fhir.jpa.patch.JsonPatchUtils;
-import ca.uhn.fhir.jpa.patch.XmlPatchUtils;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
+import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IQueryParameterType;
+import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.dstu2.resource.ListResource;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException;
-import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
@@ -104,9 +101,9 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ObjectUtil;
-import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.IInstanceValidatorModule;
import ca.uhn.fhir.validation.IValidationContext;
@@ -118,7 +115,6 @@ import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
-import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -160,7 +156,6 @@ public abstract class BaseHapiFhirResourceDao extends B
public static final String BASE_RESOURCE_NAME = "resource";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
-
@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;
@Autowired(required = false)
@@ -181,18 +176,37 @@ public abstract class BaseHapiFhirResourceDao extends B
private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
@Autowired
private IJobCoordinator myJobCoordinator;
-
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
private Class<T> myResourceType;
-
+ @Autowired
+ private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired
private MemoryCacheService myMemoryCacheService;
private TransactionTemplate myTxTemplate;
-
@Autowired
private UrlPartitioner myUrlPartitioner;
+ @Override
+ protected HapiTransactionService getTransactionService() {
+ return myTransactionService;
+ }
+
+ @VisibleForTesting
+ public void setTransactionService(HapiTransactionService theTransactionService) {
+ myTransactionService = theTransactionService;
+ }
+
+ @Override
+ protected MatchResourceUrlService getMatchResourceUrlService() {
+ return myMatchResourceUrlService;
+ }
+
+ @Override
+ protected IStorageResourceParser getStorageResourceParser() {
+ return myJpaStorageResourceParser;
+ }
+
/**
* @deprecated Use {@link #create(T, RequestDetails)} instead
*/
@@ -219,11 +233,6 @@ public abstract class BaseHapiFhirResourceDao extends B
return create(theResource, theIfNoneExist, true, new TransactionDetails(), theRequestDetails);
}
- @VisibleForTesting
- public void setTransactionService(HapiTransactionService theTransactionService) {
- myTransactionService = theTransactionService;
- }
-
@Override
public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails));
@@ -259,14 +268,14 @@ public abstract class BaseHapiFhirResourceDao extends B
}
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
- return doCreateForPostOrPut(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId);
+ return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, requestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails);
}
/**
* Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails)}
* as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails)}.
*/
- private DaoMethodOutcome doCreateForPostOrPut(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
+ private DaoMethodOutcome doCreateForPostOrPut(RequestDetails theRequest, T theResource, String theMatchUrl, boolean theProcessMatchUrl, boolean thePerformIndexing, RequestPartitionId theRequestPartitionId, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {
StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource);
@@ -275,13 +284,13 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = new ResourceTable();
entity.setResourceType(toResourceName(theResource));
entity.setPartitionId(myRequestPartitionHelperService.toStoragePartition(theRequestPartitionId));
- entity.setCreatedByMatchUrl(theIfNoneExist);
+ entity.setCreatedByMatchUrl(theMatchUrl);
entity.setVersion(1);
- if (isNotBlank(theIfNoneExist)) {
- Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theIfNoneExist, myResourceType, theTransactionDetails, theRequest);
+ if (isNotBlank(theMatchUrl) && theProcessMatchUrl) {
+ Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest);
if (match.size() > 1) {
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theIfNoneExist, match.size());
+ String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size());
throw new PreconditionFailedException(Msg.code(958) + msg);
} else if (match.size() == 1) {
ResourcePersistentId pid = match.iterator().next();
@@ -289,7 +298,7 @@ public abstract class BaseHapiFhirResourceDao extends B
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> {
return myTxTemplate.execute(tx -> {
ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
- IBaseResource resource = toResource(foundEntity, false);
+ IBaseResource resource = myJpaStorageResourceParser.toResource(foundEntity, false);
theResource.setId(resource.getIdElement().getValue());
return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
});
@@ -314,7 +323,11 @@ public abstract class BaseHapiFhirResourceDao extends B
});
};
- return toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
+ DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
+ StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
+ String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", w.getMillisAndRestart(), UrlUtil.sanitizeUrlPart(theMatchUrl));
+ outcome.setOperationOutcome(createInfoOperationOutcome(msg, responseCode));
+ return outcome;
}
}
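
The conditional-match branch above now attaches StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH to the informational OperationOutcome. Assuming the code is surfaced as issue.details.coding (a natural mapping for a response-code concept, though not shown in this hunk), a caller or test could inspect it roughly like this (R4 model classes; myPatientDao, patient, mySrd and ourLog are illustrative):

    DaoMethodOutcome outcome = myPatientDao.create(patient, "Patient?identifier=http://example.com|12345", mySrd);
    OperationOutcome oo = (OperationOutcome) outcome.getOperationOutcome();
    Coding detail = oo.getIssueFirstRep().getDetails().getCodingFirstRep();
    // Expected to identify the storage outcome, e.g. the conditional-match code used above
    ourLog.info("Storage response: {} ({})", detail.getCode(), detail.getSystem());
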
@@ -385,15 +398,15 @@ public abstract class BaseHapiFhirResourceDao extends B
theTransactionDetails.addResolvedResourceId(persistentId.getAssociatedResourceId(), persistentId);
// Pre-cache the match URL
- if (theIfNoneExist != null) {
- myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theIfNoneExist, persistentId);
+ if (theMatchUrl != null) {
+ myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, persistentId);
}
// Update the version/last updated in the resource so that interceptors get
// the correct version
// TODO - the above updateEntity calls updateResourceMetadata
// Maybe we don't need this call here?
- updateResourceMetadata(entity, theResource);
+ myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
// Populate the PID in the resource so it is available to hooks
addPidToResource(entity, theResource);
@@ -409,15 +422,15 @@ public abstract class BaseHapiFhirResourceDao extends B
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams);
}
- DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, theResource).setCreated(true);
+ DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, theResource, theMatchUrl, theOperationType)
+ .setCreated(true);
+
if (!thePerformIndexing) {
outcome.setId(theResource.getIdElement());
}
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
- outcome.setOperationOutcome(createInfoOperationOutcome(msg));
+ populateOperationOutcomeForUpdate(w, outcome, theMatchUrl, theOperationType);
- ourLog.debug(msg);
return outcome;
}
@@ -531,8 +544,7 @@ public abstract class BaseHapiFhirResourceDao extends B
// if not found, return an outcome anyways.
// Because no object actually existed, we'll
// just set the id and nothing else
- DaoMethodOutcome outcome = createMethodOutcomeForResourceId(theId.getValue(), MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING);
- return outcome;
+ return createMethodOutcomeForResourceId(theId.getValue(), MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING, StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND);
}
if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) {
@@ -541,7 +553,7 @@ public abstract class BaseHapiFhirResourceDao extends B
// Don't delete again if it's already deleted
if (isDeleted(entity)) {
- DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED);
+ DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED);
// used to exist, so we'll set the persistent id
outcome.setPersistentId(new ResourcePersistentId(entity.getResourceId()));
@@ -552,7 +564,7 @@ public abstract class BaseHapiFhirResourceDao extends B
StopWatch w = new StopWatch();
- T resourceToDelete = toResource(myResourceType, entity, null, false);
+ T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
theDeleteConflicts.setResourceIdMarkedForDeletion(theId);
// Notify IServerOperationInterceptors about pre-action call
@@ -581,14 +593,11 @@ public abstract class BaseHapiFhirResourceDao extends B
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
- DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true);
+ DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete, null, RestOperationTypeEnum.DELETE).setCreated(true);
- IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
- String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", 1, w.getMillis());
- String severity = "information";
- String code = "informational";
- OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
- outcome.setOperationOutcome(oo);
+ String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", 1);
+ msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
+ outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE));
return outcome;
}
@@ -669,7 +678,7 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid.getIdAsLong());
deletedResources.add(entity);
- T resourceToDelete = toResource(myResourceType, entity, null, false);
+ T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
HookParams hooks = new HookParams()
@@ -703,17 +712,12 @@ public abstract class BaseHapiFhirResourceDao extends B
IBaseOperationOutcome oo;
if (deletedResources.isEmpty()) {
- oo = OperationOutcomeUtil.newInstance(getContext());
- String message = getMessageSanitized("unableToDeleteNotFound", theUrl);
- String severity = "warning";
- String code = "not-found";
- OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
+ String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "unableToDeleteNotFound", theUrl);
+ oo = createOperationOutcome(OO_SEVERITY_WARN, msg, "not-found", StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND);
} else {
- oo = OperationOutcomeUtil.newInstance(getContext());
- String message = getContext().getLocalizer().getMessage(BaseStorageDao.class, "successfulDeletes", deletedResources.size(), w.getMillis());
- String severity = "information";
- String code = "informational";
- OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
+ String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", deletedResources.size());
+ msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
+ oo = createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE);
}
ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis());
@@ -745,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
private void doMetaAdd(MT theMetaAdd, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
- IBaseResource oldVersion = toResource(theEntity, false);
+ IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false);
List<TagDefinition> tags = toTagList(theMetaAdd);
for (TagDefinition nextDef : tags) {
@@ -778,7 +782,7 @@ public abstract class BaseHapiFhirResourceDao extends B
myEntityManager.merge(theEntity);
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
- IBaseResource newVersion = toResource(theEntity, false);
+ IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false);
HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion)
@@ -802,7 +806,7 @@ public abstract class BaseHapiFhirResourceDao extends B
private void doMetaDelete(MT theMetaDel, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
// todo mb update hibernate search index if we are storing resources - it assumes inline tags.
- IBaseResource oldVersion = toResource(theEntity, false);
+ IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false);
List<TagDefinition> tags = toTagList(theMetaDel);
@@ -824,7 +828,7 @@ public abstract class BaseHapiFhirResourceDao extends B
theEntity = myEntityManager.merge(theEntity);
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
- IBaseResource newVersion = toResource(theEntity, false);
+ IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false);
HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion)
@@ -889,6 +893,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
@Override
+ @Nonnull
public String getResourceName() {
return myResourceName;
}
@@ -908,7 +913,7 @@ public abstract class BaseHapiFhirResourceDao extends B
@Transactional
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
- IBundleProvider retVal = super.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset);
+ IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset);
ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart());
return retVal;
}
@@ -924,7 +929,7 @@ public abstract class BaseHapiFhirResourceDao extends B
IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
BaseHasResource entity = readEntity(id, theRequest);
- IBundleProvider retVal = super.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset);
+ IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset);
ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart());
return retVal;
@@ -939,7 +944,7 @@ public abstract class BaseHapiFhirResourceDao extends B
IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
BaseHasResource entity = readEntity(id, theRequest);
- IBundleProvider retVal = super.history(theRequest, myResourceName, entity.getId(),
+ IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(),
theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
theHistorySearchDateRangeParam.getOffset(),
@@ -1096,67 +1101,6 @@ public abstract class BaseHapiFhirResourceDao extends B
return toMetaDt(theType, tagDefinitions);
}
- @Override
- public DaoMethodOutcome patch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest) {
- TransactionDetails transactionDetails = new TransactionDetails();
- return myTransactionService.execute(theRequest, transactionDetails, tx -> doPatch(theId, theConditionalUrl, thePatchType, thePatchBody, theFhirPatchBody, theRequest, transactionDetails));
- }
-
- private DaoMethodOutcome doPatch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
- ResourceTable entityToUpdate;
- if (isNotBlank(theConditionalUrl)) {
-
- Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theConditionalUrl, myResourceType, theTransactionDetails, theRequest);
- if (match.size() > 1) {
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "PATCH", theConditionalUrl, match.size());
- throw new PreconditionFailedException(Msg.code(972) + msg);
- } else if (match.size() == 1) {
- ResourcePersistentId pid = match.iterator().next();
- entityToUpdate = myEntityManager.find(ResourceTable.class, pid.getId());
- } else {
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidMatchUrlNoMatches", theConditionalUrl);
- throw new ResourceNotFoundException(Msg.code(973) + msg);
- }
-
- } else {
- entityToUpdate = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
- if (theId.hasVersionIdPart()) {
- if (theId.getVersionIdPartAsLong() != entityToUpdate.getVersion()) {
- throw new ResourceVersionConflictException(Msg.code(974) + "Version " + theId.getVersionIdPart() + " is not the most recent version of this resource, unable to apply patch");
- }
- }
- }
-
- validateResourceType(entityToUpdate);
-
- if (isDeleted(entityToUpdate)) {
- throw createResourceGoneException(entityToUpdate);
- }
-
- IBaseResource resourceToUpdate = toResource(entityToUpdate, false);
- IBaseResource destination;
- switch (thePatchType) {
- case JSON_PATCH:
- destination = JsonPatchUtils.apply(getContext(), resourceToUpdate, thePatchBody);
- break;
- case XML_PATCH:
- destination = XmlPatchUtils.apply(getContext(), resourceToUpdate, thePatchBody);
- break;
- case FHIR_PATCH_XML:
- case FHIR_PATCH_JSON:
- default:
- IBaseParameters fhirPatchJson = theFhirPatchBody;
- new FhirPatch(getContext()).apply(resourceToUpdate, fhirPatchJson);
- destination = resourceToUpdate;
- break;
- }
-
- @SuppressWarnings("unchecked")
- T destinationCasted = (T) destination;
- myFhirContext.newJsonParser().setParserErrorHandler(new StrictErrorHandler()).encodeResourceToString(destinationCasted);
- return update(destinationCasted, null, true, theRequest);
- }
-
private boolean isDeleted(BaseHasResource entityToUpdate) {
return entityToUpdate.getDeleted() != null;
}
@@ -1205,7 +1149,7 @@ public abstract class BaseHapiFhirResourceDao extends B
throw createResourceGoneException(entity.get());
}
- T retVal = toResource(myResourceType, entity.get(), null, false);
+ T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity.get(), null, false);
ourLog.debug("Processed read on {} in {}ms", thePid, w.getMillis());
return retVal;
@@ -1239,7 +1183,7 @@ public abstract class BaseHapiFhirResourceDao extends B
BaseHasResource entity = readEntity(theId, theRequest);
validateResourceType(entity);
- T retVal = toResource(myResourceType, entity, null, false);
+ T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
if (theDeletedOk == false) {
if (isDeleted(entity)) {
@@ -1293,7 +1237,7 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = entityOpt.get();
try {
- T resource = (T) toResource(entity, false);
+ T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
reindex(resource, entity);
} catch (BaseServerResponseException | DataFormatException e) {
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
@@ -1375,6 +1319,13 @@ public abstract class BaseHapiFhirResourceDao extends B
return entity;
}
+ @Override
+ protected IBasePersistedResource readEntityLatestVersion(ResourcePersistentId thePersistentId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
+ return myEntityManager.find(ResourceTable.class, thePersistentId.getIdAsLong());
+ }
+
+
+ @Override
@Nonnull
protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, getResourceName(), theId);
@@ -1691,8 +1642,6 @@ public abstract class BaseHapiFhirResourceDao extends B
}
private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
- StopWatch w = new StopWatch();
-
T resource = theResource;
preProcessResourceForStorage(resource);
@@ -1701,6 +1650,7 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = null;
IIdType resourceId;
+ RestOperationTypeEnum update = RestOperationTypeEnum.UPDATE;
if (isNotBlank(theMatchUrl)) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest, theResource);
if (match.size() > 1) {
@@ -1711,7 +1661,8 @@ public abstract class BaseHapiFhirResourceDao extends B
entity = myEntityManager.find(ResourceTable.class, pid.getId());
resourceId = entity.getIdDt();
} else {
- DaoMethodOutcome outcome = create(resource, null, thePerformIndexing, theTransactionDetails, theRequest);
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
+ DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
// Pre-cache the match URL
if (outcome.getPersistentId() != null) {
@@ -1750,86 +1701,17 @@ public abstract class BaseHapiFhirResourceDao extends B
}
if (create) {
- return doCreateForPostOrPut(resource, null, thePerformIndexing, theTransactionDetails, theRequest, requestPartitionId);
+ return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
}
}
- if (resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) != entity.getVersion()) {
- throw new ResourceVersionConflictException(Msg.code(989) + "Trying to update " + resourceId + " but this is not the current version");
- }
+ // Start
- if (resourceId.hasResourceType() && !resourceId.getResourceType().equals(getResourceName())) {
- throw new UnprocessableEntityException(Msg.code(990) + "Invalid resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] of type[" + entity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]");
- }
-
- IBaseResource oldResource;
- if (getConfig().isMassIngestionMode()) {
- oldResource = null;
- } else {
- oldResource = toResource(entity, false);
- }
-
- /*
- * Mark the entity as not deleted - This is also done in the actual updateInternal()
- * method later on so it usually doesn't matter whether we do it here, but in the
- * case of a transaction with multiple PUTs we don't get there until later so
- * having this here means that a transaction can have a reference in one
- * resource to another resource in the same transaction that is being
- * un-deleted by the transaction. Wacky use case, sure. But it's real.
- *
- * See SystemProviderR4Test#testTransactionReSavesPreviouslyDeletedResources
- * for a test that needs this.
- */
- boolean wasDeleted = isDeleted(entity);
- entity.setDeleted(null);
-
- /*
- * If we aren't indexing, that means we're doing this inside a transaction.
- * The transaction will do the actual storage to the database a bit later on,
- * after placeholder IDs have been replaced, by calling {@link #updateInternal}
- * directly. So we just bail now.
- */
- if (!thePerformIndexing) {
- resource.setId(entity.getIdDt().getValue());
- DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, resource).setCreated(wasDeleted);
- outcome.setPreviousResource(oldResource);
- if (!outcome.isNop()) {
- // Technically this may not end up being right since we might not increment if the
- // contents turn out to be the same
- outcome.setId(outcome.getId().withVersion(Long.toString(outcome.getId().getVersionIdPartAsLong() + 1)));
- }
- return outcome;
- }
-
- /*
- * Otherwise, we're not in a transaction
- */
- ResourceTable savedEntity = updateInternal(theRequest, resource, thePerformIndexing, theForceUpdateVersion, entity, resourceId, oldResource, theTransactionDetails);
-
- if (thePerformIndexing) {
- Collection<? extends BaseTag> tagList = Collections.emptyList();
- if (entity.isHasTags()) {
- tagList = entity.getTags();
- }
- long version = entity.getVersion();
- populateResourceMetadata(entity, false, tagList, version, getResourceType(), resource);
- }
-
- DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, resource).setCreated(wasDeleted);
-
- if (!thePerformIndexing) {
- IIdType id = getContext().getVersion().newIdType();
- id.setValue(entity.getIdDt().getValue());
- outcome.setId(id);
- }
-
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
- outcome.setOperationOutcome(createInfoOperationOutcome(msg));
-
- ourLog.debug(msg);
- return outcome;
+ return doUpdateForUpdateOrPatch(theRequest, resourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, resource, entity, update, theTransactionDetails);
}
+
+
/**
* Method for updating the historical version of the resource when a history version id is included in the request.
*
@@ -1844,8 +1726,8 @@ public abstract class BaseHapiFhirResourceDao extends B
// No need for indexing as this will update a non-current version of the resource which will not be searchable
preProcessResourceForStorage(theResource, theRequest, theTransactionDetails, false);
- BaseHasResource entity = null;
- BaseHasResource currentEntity = null;
+ BaseHasResource entity;
+ BaseHasResource currentEntity;
IIdType resourceId;
@@ -1874,12 +1756,10 @@ public abstract class BaseHapiFhirResourceDao extends B
entity.setDeleted(null);
boolean isUpdatingCurrent = resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) == currentEntity.getVersion();
IBasePersistedResource savedEntity = updateHistoryEntity(theRequest, theResource, currentEntity, entity, resourceId, theTransactionDetails, isUpdatingCurrent);
- DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource).setCreated(wasDeleted);
+ DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource, null, RestOperationTypeEnum.UPDATE).setCreated(wasDeleted);
- String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", outcome.getId(), w.getMillisAndRestart());
- outcome.setOperationOutcome(createInfoOperationOutcome(msg));
+ populateOperationOutcomeForUpdate(w, outcome, null, RestOperationTypeEnum.UPDATE);
- ourLog.debug(msg);
return outcome;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
index 78739652c06..20e882f7f0c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
@@ -1,12 +1,20 @@
package ca.uhn.fhir.jpa.dao;
+import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
+import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
+import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
@@ -25,6 +33,9 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
@@ -58,25 +69,36 @@ import java.util.stream.Collectors;
* #L%
*/
-public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBaseBundle> implements IFhirSystemDao<T, MT> {
+public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseStorageDao implements IFhirSystemDao<T, MT> {
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
public ResourceCountCache myResourceCountsCache;
+
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ protected EntityManager myEntityManager;
@Autowired
private TransactionProcessor myTransactionProcessor;
@Autowired
private ApplicationContext myApplicationContext;
+ @Autowired
+ private ExpungeService myExpungeService;
+ @Autowired
+ private IResourceTableDao myResourceTableDao;
+ @Autowired
+ private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
+ @Autowired
+ private IResourceTagDao myResourceTagDao;
+ @Autowired
+ private IInterceptorBroadcaster myInterceptorBroadcaster;
@VisibleForTesting
public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
myTransactionProcessor = theTransactionProcessor;
}
- @Override
@PostConstruct
public void start() {
- super.start();
myTransactionProcessor.setDao(this);
}
@@ -124,7 +146,7 @@ public abstract class BaseHapiFhirSystemDao extends B
@Override
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
- IBundleProvider retVal = super.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
+ IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
ourLog.info("Processed global history in {}ms", w.getMillisAndRestart());
return retVal;
}
@@ -259,4 +281,25 @@ public abstract class BaseHapiFhirSystemDao extends B
return null;
}
+
+ @Override
+ protected IInterceptorBroadcaster getInterceptorBroadcaster() {
+ return myInterceptorBroadcaster;
+ }
+
+ @Override
+ protected DaoConfig getConfig() {
+ return myDaoConfig;
+ }
+
+ @Override
+ public FhirContext getContext() {
+ return myFhirContext;
+ }
+
+ @VisibleForTesting
+ public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
+ myDaoConfig = theDaoConfig;
+ }
+
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
index d9568639f4c..430b458b5c7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java
new file mode 100644
index 00000000000..dd0f8ab16b7
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java
@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.model.entity.BaseTag;
+import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
+import ca.uhn.fhir.jpa.model.entity.ResourceTag;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import javax.annotation.Nullable;
+import java.util.Collection;
+
+public interface IJpaStorageResourceParser extends IStorageResourceParser {
+
+ /**
+ * Convert a storage entity into a FHIR resource model instance
+ */
+ <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
+
+ /**
+ * Populate the metadata (Resource.meta.*) from a storage entity and other related
+ * objects pulled from the database
+ */
+ <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, R theResourceTarget);
+
+ /**
+ * Populates a resource model object's metadata (Resource.meta.*) based on the
+ * values from a storage entity.
+ *
+ * @param theEntitySource The source
+ * @param theResourceTarget The target
+ */
+ void updateResourceMetadata(IBaseResourceEntity theEntitySource, IBaseResource theResourceTarget);
+}
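Reviewer note: the sketch below is not part of this patch; it only illustrates how a caller might use the new parser abstraction instead of routing through a resource DAO. The class name, the injected field, and the use of Patient/ResourceTable are illustrative assumptions.

import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.hl7.fhir.r4.model.Patient;
import org.springframework.beans.factory.annotation.Autowired;

// Hypothetical caller, for illustration only
public class ExamplePatientLoader {

	@Autowired
	private IJpaStorageResourceParser myJpaStorageResourceParser;

	// Convert a persisted entity back into a FHIR model object; tags are resolved from the
	// entity itself according to the configured tag storage mode, so no explicit tag list is passed
	public Patient load(ResourceTable theEntity) {
		return myJpaStorageResourceParser.toResource(Patient.class, theEntity, null, false);
	}
}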
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
new file mode 100644
index 00000000000..9856d819579
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
@@ -0,0 +1,490 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.dao.IDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
+import ca.uhn.fhir.jpa.entity.PartitionEntity;
+import ca.uhn.fhir.jpa.entity.ResourceSearchView;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
+import ca.uhn.fhir.jpa.model.entity.BaseTag;
+import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
+import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTag;
+import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
+import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
+import ca.uhn.fhir.model.api.IResource;
+import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+import ca.uhn.fhir.model.api.Tag;
+import ca.uhn.fhir.model.api.TagList;
+import ca.uhn.fhir.model.base.composite.BaseCodingDt;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.primitive.InstantDt;
+import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
+import ca.uhn.fhir.parser.DataFormatException;
+import ca.uhn.fhir.parser.IParser;
+import ca.uhn.fhir.parser.LenientErrorHandler;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.util.MetaUtil;
+import org.apache.commons.lang3.Validate;
+import org.hl7.fhir.instance.model.api.IAnyResource;
+import org.hl7.fhir.instance.model.api.IBaseCoding;
+import org.hl7.fhir.instance.model.api.IBaseMetaType;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+
+import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.cleanProvenanceSourceUri;
+import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource;
+import static org.apache.commons.lang3.StringUtils.defaultString;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public class JpaStorageResourceParser implements IJpaStorageResourceParser {
+ public static final LenientErrorHandler LENIENT_ERROR_HANDLER = new LenientErrorHandler(false).setErrorOnInvalidValue(false);
+ private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageResourceParser.class);
+ @Autowired
+ private FhirContext myContext;
+ @Autowired
+ private DaoConfig myDaoConfig;
+ @Autowired
+ private IResourceHistoryTableDao myResourceHistoryTableDao;
+ @Autowired
+ private PartitionSettings myPartitionSettings;
+ @Autowired
+ private IPartitionLookupSvc myPartitionLookupSvc;
+
+ @Override
+ public IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation) {
+ RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
+ Class<? extends IBaseResource> resourceType = type.getImplementingClass();
+ return toResource(resourceType, (IBaseResourceEntity) theEntity, null, theForHistoryOperation);
+ }
+
+ @Override
+ public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
+
+ // 1. get the resource, its encoding, and the tags if any
+ byte[] resourceBytes;
+ String resourceText;
+ ResourceEncodingEnum resourceEncoding;
+ @Nullable
+ Collection<? extends BaseTag> tagList = Collections.emptyList();
+ long version;
+ String provenanceSourceUri = null;
+ String provenanceRequestId = null;
+
+ if (theEntity instanceof ResourceHistoryTable) {
+ ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
+ resourceBytes = history.getResource();
+ resourceText = history.getResourceTextVc();
+ resourceEncoding = history.getEncoding();
+ switch (myDaoConfig.getTagStorageMode()) {
+ case VERSIONED:
+ default:
+ if (history.isHasTags()) {
+ tagList = history.getTags();
+ }
+ break;
+ case NON_VERSIONED:
+ if (history.getResourceTable().isHasTags()) {
+ tagList = history.getResourceTable().getTags();
+ }
+ break;
+ case INLINE:
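+ // In INLINE mode the tags live inside the serialized resource body, so no separate tag collection is loaded here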
+ tagList = null;
+ }
+ version = history.getVersion();
+ if (history.getProvenance() != null) {
+ provenanceRequestId = history.getProvenance().getRequestId();
+ provenanceSourceUri = history.getProvenance().getSourceUri();
+ }
+ } else if (theEntity instanceof ResourceTable) {
+ ResourceTable resource = (ResourceTable) theEntity;
+ ResourceHistoryTable history;
+ if (resource.getCurrentVersionEntity() != null) {
+ history = resource.getCurrentVersionEntity();
+ } else {
+ version = theEntity.getVersion();
+ history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
+ ((ResourceTable) theEntity).setCurrentVersionEntity(history);
+
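+ // If no history row exists for the requested version (e.g. it was expunged), walk backwards to the newest surviving version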
+ while (history == null) {
+ if (version > 1L) {
+ version--;
+ history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
+ } else {
+ return null;
+ }
+ }
+ }
+
+ resourceBytes = history.getResource();
+ resourceEncoding = history.getEncoding();
+ resourceText = history.getResourceTextVc();
+ switch (myDaoConfig.getTagStorageMode()) {
+ case VERSIONED:
+ case NON_VERSIONED:
+ if (resource.isHasTags()) {
+ tagList = resource.getTags();
+ }
+ break;
+ case INLINE:
+ tagList = null;
+ break;
+ }
+ version = history.getVersion();
+ if (history.getProvenance() != null) {
+ provenanceRequestId = history.getProvenance().getRequestId();
+ provenanceSourceUri = history.getProvenance().getSourceUri();
+ }
+ } else if (theEntity instanceof ResourceSearchView) {
+ // This is the search View
+ ResourceSearchView view = (ResourceSearchView) theEntity;
+ resourceBytes = view.getResource();
+ resourceText = view.getResourceTextVc();
+ resourceEncoding = view.getEncoding();
+ version = view.getVersion();
+ provenanceRequestId = view.getProvenanceRequestId();
+ provenanceSourceUri = view.getProvenanceSourceUri();
+ switch (myDaoConfig.getTagStorageMode()) {
+ case VERSIONED:
+ case NON_VERSIONED:
+ if (theTagList != null) {
+ tagList = theTagList;
+ }
+ break;
+ case INLINE:
+ tagList = null;
+ break;
+ }
+ } else {
+ // something wrong
+ return null;
+ }
+
+ // 2. get The text
+ String decodedResourceText = decodedResourceText(resourceBytes, resourceText, resourceEncoding);
+
+ // 3. Use the appropriate custom type if one is specified in the context
+ Class<R> resourceType = determineTypeToParse(theResourceType, tagList);
+
+ // 4. parse the text to FHIR
+ R retVal = parseResource(theEntity, resourceEncoding, decodedResourceText, resourceType);
+
+ // 5. fill MetaData
+ retVal = populateResourceMetadata(theEntity, theForHistoryOperation, tagList, version, retVal);
+
+ // 6. Handle source (provenance)
+ populateResourceSource(provenanceSourceUri, provenanceRequestId, retVal);
+
+ // 7. Add partition information
+ populateResourcePartitionInformation(theEntity, retVal);
+
+ return retVal;
+ }
+
+ private <R extends IBaseResource> void populateResourcePartitionInformation(IBaseResourceEntity theEntity, R retVal) {
+ if (myPartitionSettings.isPartitioningEnabled()) {
+ PartitionablePartitionId partitionId = theEntity.getPartitionId();
+ if (partitionId != null && partitionId.getPartitionId() != null) {
+ PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId());
+ retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId());
+ } else {
+ retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null);
+ }
+ }
+ }
+
+ private <R extends IBaseResource> void populateResourceSource(String provenanceSourceUri, String provenanceRequestId, R retVal) {
+ if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
+ String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
+ + (isNotBlank(provenanceRequestId) ? "#" : "")
+ + defaultString(provenanceRequestId);
+
+ MetaUtil.setSource(myContext, retVal, sourceString);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum resourceEncoding, String decodedResourceText, Class<R> resourceType) {
+ R retVal;
+ if (resourceEncoding != ResourceEncodingEnum.DEL) {
+
+ IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());
+
+ try {
+ retVal = parser.parseResource(resourceType, decodedResourceText);
+ } catch (Exception e) {
+ StringBuilder b = new StringBuilder();
+ b.append("Failed to parse database resource[");
+ b.append(myContext.getResourceType(resourceType));
+ b.append("/");
+ b.append(theEntity.getIdDt().getIdPart());
+ b.append(" (pid ");
+ b.append(theEntity.getId());
+ b.append(", version ");
+ b.append(theEntity.getFhirVersion().name());
+ b.append("): ");
+ b.append(e.getMessage());
+ String msg = b.toString();
+ ourLog.error(msg, e);
+ throw new DataFormatException(Msg.code(928) + msg, e);
+ }
+
+ } else {
+
+ retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
+
+ }
+ return retVal;
+ }
+
+ @SuppressWarnings("unchecked")
+ private <R extends IBaseResource> Class<R> determineTypeToParse(Class<R> theResourceType, @Nullable Collection<? extends BaseTag> tagList) {
+ Class<R> resourceType = theResourceType;
+ if (tagList != null) {
+ if (myContext.hasDefaultTypeForProfile()) {
+ for (BaseTag nextTag : tagList) {
+ if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
+ String profile = nextTag.getTag().getCode();
+ if (isNotBlank(profile)) {
+ Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
+ if (newType != null && theResourceType.isAssignableFrom(newType)) {
+ ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
+ resourceType = (Class<R>) newType;
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ return resourceType;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, R theResourceTarget) {
+ if (theResourceTarget instanceof IResource) {
+ IResource res = (IResource) theResourceTarget;
+ theResourceTarget = (R) populateResourceMetadataHapi(theEntitySource, tagList, theForHistoryOperation, res, theVersion);
+ } else {
+ IAnyResource res = (IAnyResource) theResourceTarget;
+ theResourceTarget = populateResourceMetadataRi(theEntitySource, tagList, theForHistoryOperation, res, theVersion);
+ }
+ return theResourceTarget;
+ }
+
+ @SuppressWarnings("unchecked")
+ private <R extends IResource> R populateResourceMetadataHapi(IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, R res, Long theVersion) {
+ R retVal = res;
+ if (theEntity.getDeleted() != null) {
+ res = (R) myContext.getResourceDefinition(res).newInstance();
+ retVal = res;
+ ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
+ if (theForHistoryOperation) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
+ }
+ } else if (theForHistoryOperation) {
+ /*
+ * If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
+ */
+ Date published = theEntity.getPublished().getValue();
+ Date updated = theEntity.getUpdated().getValue();
+ if (published.equals(updated)) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.POST);
+ } else {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.PUT);
+ }
+ }
+
+ res.setId(theEntity.getIdDt().withVersion(theVersion.toString()));
+
+ ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
+ ResourceMetadataKeyEnum.PUBLISHED.put(res, theEntity.getPublished());
+ ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
+ IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
+
+ if (theTagList != null) {
+ if (theEntity.isHasTags()) {
+ TagList tagList = new TagList();
+ List<IBaseCoding> securityLabels = new ArrayList<>();
+ List<IdDt> profiles = new ArrayList<>();
+ for (BaseTag next : theTagList) {
+ switch (next.getTag().getTagType()) {
+ case PROFILE:
+ profiles.add(new IdDt(next.getTag().getCode()));
+ break;
+ case SECURITY_LABEL:
+ IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
+ secLabel.setSystem(next.getTag().getSystem());
+ secLabel.setCode(next.getTag().getCode());
+ secLabel.setDisplay(next.getTag().getDisplay());
+ securityLabels.add(secLabel);
+ break;
+ case TAG:
+ tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
+ break;
+ }
+ }
+ if (tagList.size() > 0) {
+ ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
+ }
+ if (securityLabels.size() > 0) {
+ ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
+ }
+ if (profiles.size() > 0) {
+ ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
+ }
+ }
+ }
+
+ return retVal;
+ }
+
+ @SuppressWarnings("unchecked")
+ private <R extends IBaseResource> R populateResourceMetadataRi(IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
+ R retVal = (R) res;
+ if (theEntity.getDeleted() != null) {
+ res = (IAnyResource) myContext.getResourceDefinition(res).newInstance();
+ retVal = (R) res;
+ ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
+ if (theForHistoryOperation) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.DELETE.toCode());
+ }
+ } else if (theForHistoryOperation) {
+ /*
+ * If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
+ */
+ Date published = theEntity.getPublished().getValue();
+ Date updated = theEntity.getUpdated().getValue();
+ if (published.equals(updated)) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.POST.toCode());
+ } else {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, Bundle.HTTPVerb.PUT.toCode());
+ }
+ }
+
+ res.getMeta().setLastUpdated(null);
+ res.getMeta().setVersionId(null);
+
+ updateResourceMetadata(theEntity, res);
+ res.setId(res.getIdElement().withVersion(theVersion.toString()));
+
+ res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
+ IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
+
+ if (theTagList != null) {
+ res.getMeta().getTag().clear();
+ res.getMeta().getProfile().clear();
+ res.getMeta().getSecurity().clear();
+ for (BaseTag next : theTagList) {
+ switch (next.getTag().getTagType()) {
+ case PROFILE:
+ res.getMeta().addProfile(next.getTag().getCode());
+ break;
+ case SECURITY_LABEL:
+ IBaseCoding sec = res.getMeta().addSecurity();
+ sec.setSystem(next.getTag().getSystem());
+ sec.setCode(next.getTag().getCode());
+ sec.setDisplay(next.getTag().getDisplay());
+ break;
+ case TAG:
+ IBaseCoding tag = res.getMeta().addTag();
+ tag.setSystem(next.getTag().getSystem());
+ tag.setCode(next.getTag().getCode());
+ tag.setDisplay(next.getTag().getDisplay());
+ break;
+ }
+ }
+ }
+
+ return retVal;
+ }
+
+ @Override
+ public void updateResourceMetadata(IBaseResourceEntity theEntitySource, IBaseResource theResourceTarget) {
+ IIdType id = theEntitySource.getIdDt();
+ if (myContext.getVersion().getVersion().isRi()) {
+ id = myContext.getVersion().newIdType().setValue(id.getValue());
+ }
+
+ if (id.hasResourceType() == false) {
+ id = id.withResourceType(theEntitySource.getResourceType());
+ }
+
+ theResourceTarget.setId(id);
+ if (theResourceTarget instanceof IResource) {
+ ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart());
+ ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated());
+ } else {
+ IBaseMetaType meta = theResourceTarget.getMeta();
+ meta.setVersionId(id.getVersionIdPart());
+ meta.setLastUpdated(theEntitySource.getUpdatedDate());
+ }
+ }
+
+ private FhirContext getContext(FhirVersionEnum theVersion) {
+ Validate.notNull(theVersion, "theVersion must not be null");
+ if (theVersion == myContext.getVersion().getVersion()) {
+ return myContext;
+ }
+ return FhirContext.forCached(theVersion);
+ }
+
+ private static String decodedResourceText(byte[] resourceBytes, String resourceText, ResourceEncodingEnum resourceEncoding) {
+ String decodedResourceText;
+ if (resourceText != null) {
+ decodedResourceText = resourceText;
+ } else {
+ decodedResourceText = decodeResource(resourceBytes, resourceEncoding);
+ }
+ return decodedResourceText;
+ }
+
+ private static List<BaseCodingDt> toBaseCodingList(List<IBaseCoding> theSecurityLabels) {
+ ArrayList<BaseCodingDt> retVal = new ArrayList<>(theSecurityLabels.size());
+ for (IBaseCoding next : theSecurityLabels) {
+ retVal.add((BaseCodingDt) next);
+ }
+ return retVal;
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceExpungeService.java
index 627606f9da0..bdbeb29bcb7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceExpungeService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceExpungeService.java
@@ -25,9 +25,10 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
@@ -41,19 +42,18 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -123,11 +123,13 @@ public class ResourceExpungeService implements IResourceExpungeService {
private DaoConfig myDaoConfig;
@Autowired
private MemoryCacheService myMemoryCacheService;
+ @Autowired
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
@Override
@Transactional
public List<ResourcePersistentId> findHistoricalVersionsOfNonDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
- if(isEmptyQuery(theRemainingCount)){
+ if (isEmptyQuery(theRemainingCount)) {
return Collections.EMPTY_LIST;
}
@@ -154,7 +156,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
@Override
@Transactional
public List<ResourcePersistentId> findHistoricalVersionsOfDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
- if(isEmptyQuery(theRemainingCount)){
+ if (isEmptyQuery(theRemainingCount)) {
return Collections.EMPTY_LIST;
}
@@ -192,7 +194,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
* be optimized, but expunge is hopefully not frequently called on busy servers
* so it shouldn't be too big a deal.
*/
- TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization(){
+ TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
@Override
public void afterCommit() {
myMemoryCacheService.invalidateAllCaches();
@@ -220,8 +222,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
private void callHooks(RequestDetails theRequestDetails, AtomicInteger theRemainingCount, ResourceHistoryTable theVersion, IdDt theId) {
final AtomicInteger counter = new AtomicInteger();
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, myInterceptorBroadcaster, theRequestDetails)) {
- IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(theId.getResourceType());
- IBaseResource resource = resourceDao.toResource(theVersion, false);
+ IBaseResource resource = myJpaStorageResourceParser.toResource(theVersion, false);
HookParams params = new HookParams()
.add(AtomicInteger.class, counter)
.add(IIdType.class, theId)
@@ -324,7 +325,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, Long myResourceId, AtomicInteger theRemainingCount) {
Pageable page;
- synchronized (theRemainingCount){
+ synchronized (theRemainingCount) {
if (expungeLimitReached(theRemainingCount)) {
return;
}
@@ -348,7 +349,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
return new SliceImpl<>(Collections.singletonList(myVersion.getId()));
}
- private boolean isEmptyQuery(int theCount){
+ private boolean isEmptyQuery(int theCount) {
return theCount <= 0;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java
index 8338456f9af..2cd3586d538 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@@ -41,7 +42,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc {
private final DeleteExpungeSqlBuilder myDeleteExpungeSqlBuilder;
private final IFulltextSearchSvc myFullTextSearchSvc;
- public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, IFulltextSearchSvc theFullTextSearchSvc) {
+ public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) {
myEntityManager = theEntityManager;
myDeleteExpungeSqlBuilder = theDeleteExpungeSqlBuilder;
myFullTextSearchSvc = theFullTextSearchSvc;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
index 075a9a6b64e..8cad69da494 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
@@ -31,7 +31,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
@@ -104,13 +106,15 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private DaoConfig myDaoConfig;
+ @Autowired
+ private MemoryCacheService myMemoryCacheService;
+ @Autowired
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
/*
* Non autowired fields (will be different for every instance
* of this class, since it's a prototype)
*/
- @Autowired
- private MemoryCacheService myMemoryCacheService;
private Search mySearchEntity;
private String myUuid;
private SearchCacheStatusEnum myCacheStatus;
@@ -162,7 +166,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
resource = next;
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(next.getResourceType());
- retVal.add(dao.toResource(resource, true));
+ retVal.add(myJpaStorageResourceParser.toResource(resource, true));
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
index 0e77506db02..5ca61fa53ac 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
@@ -23,11 +23,20 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
+import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
+import java.util.Date;
+import java.util.UUID;
+
+import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
+
public class PersistedJpaBundleProviderFactory {
@Autowired
@@ -46,4 +55,28 @@ public class PersistedJpaBundleProviderFactory {
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder);
}
+
+
+ public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
+ return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
+ }
+
+ public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
+ String resourceName = defaultIfBlank(theResourceType, null);
+
+ Search search = new Search();
+ search.setOffset(theOffset);
+ search.setDeleted(false);
+ search.setCreated(new Date());
+ search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
+ search.setUuid(UUID.randomUUID().toString());
+ search.setResourceType(resourceName);
+ search.setResourceId(theResourcePid);
+ search.setSearchType(SearchTypeEnum.HISTORY);
+ search.setStatus(SearchStatusEnum.FINISHED);
+ search.setHistorySearchStyle(searchParameterType);
+
+ return newInstance(theRequest, search);
+ }
+
}
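Reviewer note: a minimal sketch (not part of this patch) of invoking the new factory-level history helper for a server-wide history query; the wrapper class and method names are placeholder assumptions.

import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;

import java.util.Date;

// Hypothetical usage, for illustration only
public class ExampleHistoryCaller {

	private final PersistedJpaBundleProviderFactory myFactory;

	public ExampleHistoryCaller(PersistedJpaBundleProviderFactory theFactory) {
		myFactory = theFactory;
	}

	// Server-wide history: no resource type, no resource PID, no paging offset
	public IBundleProvider globalHistorySince(RequestDetails theRequest, Date theSince) {
		return myFactory.history(theRequest, null, null, theSince, null, null);
	}
}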
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
index e5a8492ee3b..83b7a04e1c8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
@@ -39,6 +39,7 @@ import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
@@ -144,20 +145,34 @@ public class SearchBuilder implements ISearchBuilder {
@Deprecated
public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
+ public static final String RESOURCE_ID_ALIAS = "resource_id";
+ public static final String RESOURCE_VERSION_ALIAS = "resource_version";
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
private static final ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L);
private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
-
private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
- public static final String RESOURCE_ID_ALIAS = "resource_id";
- public static final String RESOURCE_VERSION_ALIAS = "resource_version";
public static boolean myUseMaxPageSize50ForTest = false;
+ protected final IInterceptorBroadcaster myInterceptorBroadcaster;
+ protected final IResourceTagDao myResourceTagDao;
private final String myResourceName;
private final Class<? extends IBaseResource> myResourceType;
-
+ private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
+ private final SqlObjectFactory mySqlBuilderFactory;
+ private final HibernatePropertiesProvider myDialectProvider;
+ private final ModelConfig myModelConfig;
+ private final ISearchParamRegistry mySearchParamRegistry;
+ private final PartitionSettings myPartitionSettings;
+ private final DaoRegistry myDaoRegistry;
+ private final IResourceSearchViewDao myResourceSearchViewDao;
+ private final FhirContext myContext;
+ private final IIdHelperService myIdHelperService;
+ private final DaoConfig myDaoConfig;
+ private final IDao myCallingDao;
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ protected EntityManager myEntityManager;
private List<ResourcePersistentId> myAlsoIncludePids;
private CriteriaBuilder myCriteriaBuilder;
private SearchParameterMap myParams;
@@ -167,30 +182,12 @@ public class SearchBuilder implements ISearchBuilder {
private Set<ResourcePersistentId> myPidSet;
private boolean myHasNextIteratorQuery = false;
private RequestPartitionId myRequestPartitionId;
-
- @PersistenceContext(type = PersistenceContextType.TRANSACTION)
- protected EntityManager myEntityManager;
@Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc;
@Autowired(required = false)
private IElasticsearchSvc myIElasticsearchSvc;
-
- private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
- private final SqlObjectFactory mySqlBuilderFactory;
- private final HibernatePropertiesProvider myDialectProvider;
- private final ModelConfig myModelConfig;
- private final ISearchParamRegistry mySearchParamRegistry;
- private final PartitionSettings myPartitionSettings;
- protected final IInterceptorBroadcaster myInterceptorBroadcaster;
- protected final IResourceTagDao myResourceTagDao;
- private final DaoRegistry myDaoRegistry;
- private final IResourceSearchViewDao myResourceSearchViewDao;
- private final FhirContext myContext;
- private final IIdHelperService myIdHelperService;
-
- private final DaoConfig myDaoConfig;
-
- private final IDao myCallingDao;
+ @Autowired
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
/**
* Constructor
@@ -893,7 +890,7 @@ public class SearchBuilder implements ISearchBuilder {
IBaseResource resource = null;
if (next != null) {
- resource = myCallingDao.toResource(resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
+ resource = myJpaStorageResourceParser.toResource(resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
}
if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
index c0d8a24b752..e5d2ed42eb3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java
@@ -38,6 +38,9 @@ import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
+import ca.uhn.fhir.jpa.dao.IStorageResourceParser;
+import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@@ -264,6 +267,8 @@ public class TermReadSvcImpl implements ITermReadSvc {
private CachingValidationSupport myCachingValidationSupport;
@Autowired
private VersionCanonicalizer myVersionCanonicalizer;
+ @Autowired
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
@Override
public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) {
@@ -2434,7 +2439,7 @@ public class TermReadSvcImpl implements ITermReadSvc {
+ ForcedId.IDX_FORCEDID_TYPE_FID + " removed?");
IFhirResourceDao csDao = myDaoRegistry.getResourceDao("CodeSystem");
- IBaseResource cs = csDao.toResource(resultList.get(0), false);
+ IBaseResource cs = myJpaStorageResourceParser.toResource(resultList.get(0), false);
return Optional.of(cs);
}
@@ -2523,7 +2528,7 @@ public class TermReadSvcImpl implements ITermReadSvc {
private org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) {
Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition("ValueSet").getImplementingClass();
- IBaseResource valueSet = myDaoRegistry.getResourceDao("ValueSet").toResource(type, theResourceTable, null, false);
+ IBaseResource valueSet = myJpaStorageResourceParser.toResource(type, theResourceTable, null, false);
return myVersionCanonicalizer.valueSetToCanonical(valueSet);
}
diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml
index 1e743702eef..5f2fbdca982 100644
--- a/hapi-fhir-jpaserver-cql/pom.xml
+++ b/hapi-fhir-jpaserver-cql/pom.xml
@@ -7,7 +7,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index 87ef97cb206..f010b9a9d85 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index f1f6bfd85f4..c8a1b5a549a 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 61b7e9e962c..c0bc3542616 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
index 845a78afb1b..435afb8c6c4 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
@@ -20,14 +20,15 @@ package ca.uhn.fhir.jpa.model.cross;
* #L%
*/
-import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
-import java.util.Date;
-
public interface IBasePersistedResource extends IResourceLookup {
IIdType getIdDt();
+ long getVersion();
+
+ boolean isDeleted();
+
+ void setNotDeleted();
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java
index d035b99b551..77a5ac5b578 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ModelConfig.java
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
+import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.dstu2.model.Subscription;
@@ -79,6 +80,30 @@ public class ModelConfig {
private boolean myDefaultSearchParamsCanBeOverridden = true;
private Set<Subscription.SubscriptionChannelType> mySupportedSubscriptionTypes = new HashSet<>();
private boolean myCrossPartitionSubscription = false;
+
+ /**
+ * If set to true, attempt to map terminology for bulk export jobs using the
+ * logic in
+ * {@link ResponseTerminologyTranslationSvc}. Default is false.
+ *
+ * @since 6.3.0
+ */
+ public boolean isNormalizeTerminologyForBulkExportJobs() {
+ return myNormalizeTerminologyForBulkExportJobs;
+ }
+
+ /**
+ * If set to true, attempt to map terminology for bulk export jobs using the
+ * logic in
+ * {@link ResponseTerminologyTranslationSvc}. Default is false.
+ *
+ * @since 6.3.0
+ */
+ public void setNormalizeTerminologyForBulkExportJobs(boolean theNormalizeTerminologyForBulkExportJobs) {
+ myNormalizeTerminologyForBulkExportJobs = theNormalizeTerminologyForBulkExportJobs;
+ }
+
+ private boolean myNormalizeTerminologyForBulkExportJobs = false;
private String myEmailFromAddress = "noreply@unknown.com";
private String myWebsocketContextPath = DEFAULT_WEBSOCKET_CONTEXT_PATH;
/**
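Reviewer note: a minimal sketch (not part of this patch) of enabling the new bulk-export terminology flag, assuming the application already exposes a ModelConfig bean; the class name is an illustrative assumption.

import ca.uhn.fhir.jpa.model.entity.ModelConfig;

// Hypothetical configuration hook, for illustration only
public class ExampleModelConfigCustomizer {

	// Enable terminology mapping for bulk export output (the default is false)
	public void customize(ModelConfig theModelConfig) {
		theModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
	}
}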
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
index c8d815d26f2..9b6161704fd 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
@@ -212,6 +212,16 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResourceVersion;
}
+ @Override
+ public boolean isDeleted() {
+ return getDeleted() != null;
+ }
+
+ @Override
+ public void setNotDeleted() {
+ setDeleted(null);
+ }
+
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
index c03b26edd0a..441688b1158 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
@@ -548,6 +548,16 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
return myVersion;
}
+ @Override
+ public boolean isDeleted() {
+ return getDeleted() != null;
+ }
+
+ @Override
+ public void setNotDeleted() {
+ setDeleted(null);
+ }
+
public void setVersion(long theVersion) {
myVersion = theVersion;
}
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index 803a246028b..89f8286a7ae 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 9d201a1d717..41893251dcd 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index 853e2b8dc34..51e074993f9 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index 5977ad827b6..713e990cb54 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java
index adbf0b66a8c..17bc0f606ff 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java
@@ -48,14 +48,16 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo");
- IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless();
+ IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualifiedVersionless();
- myCodeSystemDao.delete(id);
+ myCodeSystemDao.delete(id, mySrd);
codeSystem = new CodeSystem();
codeSystem.setUrl("http://foo");
- myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless();
+ IIdType id2 = myCodeSystemDao.update(codeSystem, "CodeSystem?url=http://foo", mySrd).getId();
+ assertNotEquals(id.getIdPart(), id2.getIdPart());
+ assertEquals("1", id2.getVersionIdPart());
}
@Test
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
index 41e0eb898fc..7b07c15b765 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
@@ -1117,7 +1117,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String encoded = myFhirContext.newXmlParser().encodeResourceToString(response.getOperationOutcome());
ourLog.info(encoded);
assertThat(encoded, containsString(
- "
"));
+ ""));
} finally {
IOUtils.closeQuietly(resp);
}
@@ -1214,7 +1214,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took "));
} finally {
response.close();
}
@@ -1241,7 +1241,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Deletion failed."));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Nothing has been deleted."));
} finally {
response.close();
}
@@ -1322,7 +1322,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
MethodOutcome resp = myClient.delete().resourceById(id).execute();
OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took"));
}
/**
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java
index 566a57d9ef4..943f46035b7 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java
@@ -192,14 +192,11 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
@Test
- public void testPatchUsingJsonPatch_Transaction() throws Exception {
- String methodName = "testPatchUsingJsonPatch_Transaction";
+ public void testPatchUsingJsonPatch_Transaction() {
IIdType pid1;
{
Patient patient = new Patient();
patient.setActive(true);
- patient.addIdentifier().setSystem("urn:system").setValue("0");
- patient.addName().setFamily(methodName).addGiven("Joe");
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
@@ -224,6 +221,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
.getRequest().setUrl(pid1.getValue()).setMethod(HTTPVerb.PUT);
Bundle bundle = ourClient.transaction().withBundle(input).execute();
+ ourLog.info("Response: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
//Validate over all bundle response entry contents.
assertThat(bundle.getType(), is(equalTo(Bundle.BundleType.TRANSACTIONRESPONSE)));
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index 6dfc992e56a..41156640b1f 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java
index 279c16c19aa..ad9c58d08c0 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java
@@ -64,11 +64,11 @@ import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class BaseHapiFhirDaoTest {
- private static class TestDao extends BaseHapiFhirDao<Patient> {
+ private static class TestDao extends BaseHapiFhirResourceDao<Patient> {
@Nullable
@Override
- protected String getResourceName() {
+ public String getResourceName() {
return "Patient";
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InterceptorTest.java
index 7910be524ca..2b881a7a20f 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InterceptorTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InterceptorTest.java
@@ -171,7 +171,7 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
p = new Patient();
p.addName().setFamily("PATIENT3");
- id2 = myPatientDao.update(p, "Patient?family=ZZZ", mySrd).getId().getIdPartAsLong();
+ id2 = myPatientDao.update(p, "Patient?family=PATIENT3", mySrd).getId().getIdPartAsLong();
assertNotEquals(id, id2);
detailsCapt = ArgumentCaptor.forClass(RequestDetails.class);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TagsTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TagsTest.java
index 01ab0be5d8a..a54994dc87d 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TagsTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TagsTest.java
@@ -4,8 +4,10 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
+import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Enumerations;
@@ -24,6 +26,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.when;
@SuppressWarnings({"Duplicates"})
public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
@@ -111,6 +115,119 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
patient = (Patient) myPatientDao.update(patient, mySrd).getResource();
myCaptureQueriesListener.logAllQueries();
runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+ assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
+ assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+
+ // Read it back
+
+ patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
+ assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
+ assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+
+ }
+
+ /**
+ * Make sure tags are preserved
+ */
+ @Test
+ public void testDeleteResourceWithTags_NonVersionedTags_InTransaction() {
+ initializeNonVersioned();
+ when(mySrd.getHeader(eq(Constants.HEADER_PREFER))).thenReturn("return=representation");
+ Bundle input, output;
+
+ // Delete
+
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+ input = new BundleBuilder(myFhirContext)
+ .addTransactionDeleteEntry(new IdType("Patient/A"))
+ .andThen()
+ .getBundleTyped();
+ output = mySystemDao.transaction(mySrd, input);
+ IIdType outcomeId = new IdType(output.getEntry().get(0).getResponse().getLocation());
+ assertEquals("3", outcomeId.getVersionIdPart());
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+
+ // Make sure $meta-get can fetch the tags of the deleted resource
+
+ Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
+ assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
+ assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+ assertEquals("3", meta.getVersionId());
+
+ // Revive and verify
+
+ Patient patient = new Patient();
+ patient.setId("A");
+ patient.getMeta().addProfile("http://profile3");
+ patient.setActive(true);
+
+ myCaptureQueriesListener.clear();
+
+ input = new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(patient)
+ .andThen()
+ .getBundleTyped();
+ output = mySystemDao.transaction(mySrd, input);
+ patient = (Patient) output.getEntry().get(0).getResource();
+ assert patient != null;
+
+ assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
+ assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+ myCaptureQueriesListener.logAllQueries();
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+
+ // Read it back
+
+ patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
+ assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
+ assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+
+ }
+
+ /**
+ * Make sure tags are preserved
+ */
+ @Test
+ public void testDeleteResourceWithTags_VersionedTags_InTransaction() {
+ initializeVersioned();
+ when(mySrd.getHeader(eq(Constants.HEADER_PREFER))).thenReturn("return=representation");
+ Bundle input, output;
+
+ // Delete
+
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+ input = new BundleBuilder(myFhirContext)
+ .addTransactionDeleteEntry(new IdType("Patient/A"))
+ .andThen()
+ .getBundleTyped();
+ output = mySystemDao.transaction(mySrd, input);
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+
+ // Make sure $meta-get can fetch the tags of the deleted resource
+
+ Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
+ assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
+ assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+
+ // Revive and verify
+
+ Patient patient = new Patient();
+ patient.setId("A");
+ patient.getMeta().addProfile("http://profile3");
+ patient.setActive(true);
+
+ myCaptureQueriesListener.clear();
+ input = new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(patient)
+ .andThen()
+ .getBundleTyped();
+ output = mySystemDao.transaction(mySrd, input);
+ patient = (Patient) output.getEntry().get(0).getResource();
+ assert patient != null;
+ myCaptureQueriesListener.logAllQueries();
+ runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
+ assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
+ assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
+ // Read it back
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
index f0acc71253c..849c909ff6f 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
@@ -469,22 +469,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
}
@Test
- public void testReCreateMatchResource() {
-
- CodeSystem codeSystem = new CodeSystem();
- codeSystem.setUrl("http://foo");
- IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless();
-
- myCodeSystemDao.delete(id);
-
- codeSystem = new CodeSystem();
- codeSystem.setUrl("http://foo");
- myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless();
-
- }
-
- @Test
- public void testUpdateAndGetHistoryResource() throws InterruptedException {
+ public void testUpdateAndGetHistoryResource() {
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester").addGiven("Joe");
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
index a710a0f0a58..f7a28ade850 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.interceptor;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
@@ -36,9 +37,8 @@ import static org.junit.jupiter.api.Assertions.fail;
public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);
public static final String TEST_OBV_FILTER = "Observation?status=amended";
-
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);
@Autowired
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;
@@ -55,6 +55,7 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
public void afterEach() {
myResponseTerminologyTranslationInterceptor.clearMappingSpecifications();
myServer.unregisterInterceptor(myResponseTerminologyTranslationInterceptor);
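+ // Restore the bulk-export terminology normalization setting to its default after each test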
+ myModelConfig.setNormalizeTerminologyForBulkExportJobs(new ModelConfig().isNormalizeTerminologyForBulkExportJobs());
}
@Test
@@ -139,6 +140,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
@Test
public void testBulkExport_TerminologyTranslation_MappingFound() {
+ myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
+
// Create some resources to load
Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
@@ -157,6 +160,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
@Test
public void testBulkExport_TerminologyTranslation_MappingNotNeeded() {
+ myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
+
// Create some resources to load
Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
@@ -176,6 +181,8 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
@Test
public void testBulkExport_TerminologyTranslation_NoMapping() {
+ myModelConfig.setNormalizeTerminologyForBulkExportJobs(true);
+
// Create some resources to load
Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderMeaningfulOutcomeMessageR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderMeaningfulOutcomeMessageR4Test.java
new file mode 100644
index 00000000000..c740cb191ee
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderMeaningfulOutcomeMessageR4Test.java
@@ -0,0 +1,686 @@
+package ca.uhn.fhir.jpa.provider.r4;
+
+import ca.uhn.fhir.i18n.HapiLocalizer;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
+import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
+import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
+import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.util.BundleBuilder;
+import org.hamcrest.Matcher;
+import org.hl7.fhir.r4.model.BooleanType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.CodeType;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsInRelativeOrder;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasItems;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.matchesPattern;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.stringContainsInOrder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+@SuppressWarnings("Duplicates")
+public class ResourceProviderMeaningfulOutcomeMessageR4Test extends BaseResourceProviderR4Test {
+
+ @BeforeEach
+ @Override
+ public void before() throws Exception {
+ super.before();
+ HapiLocalizer.setOurFailOnMissingMessage(true);
+ myDaoConfig.setAllowMultipleDelete(true);
+ }
+
+ @AfterEach
+ @Override
+ public void after() {
+ myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
+ }
+
+ @Test
+ public void testCreateUpdateDelete() {
+
+ // Initial Create-with-client-assigned-ID
+
+ Patient p = new Patient();
+ p.setId("Patient/A");
+ p.setActive(true);
+ OperationOutcome oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateAsCreate", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_AS_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Update with change
+
+ p.setId("Patient/A");
+ p.setActive(false);
+ oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdate", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Update with no change
+
+ p.setId("Patient/A");
+ oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Initial create: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateNoChange", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Delete
+
+ oo = (OperationOutcome) myClient
+ .delete()
+ .resourceById("Patient", "A")
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Delete: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Delete with no change
+
+ oo = (OperationOutcome) myClient
+ .delete()
+ .resourceById("Patient", "A")
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Delete: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("deleteResourceAlreadyDeleted"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testCreateUpdateDelete_InTransaction() {
+
+ // Initial Create-with-client-assigned-ID
+
+ Patient p = new Patient();
+ p.setId("Patient/A");
+ p.setActive(true);
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Initial create: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateAsCreate", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_AS_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Update with change
+
+ p.setId("Patient/A");
+ p.setActive(false);
+ input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .andThen()
+ .getBundle();
+ output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdate"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Update with no change
+
+ p.setId("Patient/A");
+ input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .andThen()
+ .getBundle();
+ output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateNoChange"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Delete
+
+ input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionDeleteEntry("Patient", "A")
+ .andThen()
+ .getBundle();
+ output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ // Delete With No Change
+
+ input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionDeleteEntry("Patient", "A")
+ .andThen()
+ .getBundle();
+ output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("deleteResourceAlreadyDeleted"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testCreate_InTransaction() {
+
+ Patient p = new Patient();
+ p.setActive(true);
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionCreateEntry(p)
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulCreate", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalCreate_NoMatch_InTransaction() {
+
+ Patient p = new Patient();
+ p.setActive(true);
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionCreateEntry(p)
+ .conditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(),
+ matchesPattern("Successfully conditionally created resource \".*\". No existing resources matched URL \"Patient\\?active=true\". Took [0-9]+ms."));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalCreate_WithMatch_InTransaction() {
+ createPatient(withActiveTrue());
+
+ Patient p = new Patient();
+ p.setActive(true);
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionCreateEntry(p)
+ .conditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulCreateConditionalWithMatch"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_NoMatch() {
+ Patient p = new Patient();
+ p.setActive(true);
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .conditionalByUrl("Patient?active=true")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoMatch", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_WithMatchAndChange() {
+ createPatient(withActiveTrue());
+
+ Patient p = new Patient();
+ p.setActive(true);
+ p.addName().setFamily("Test");
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .conditionalByUrl("Patient?active=true")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalWithMatch", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_WithMatchNoChange() {
+ createPatient(withActiveTrue());
+
+ Patient p = new Patient();
+ p.setActive(true);
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .update()
+ .resource(p)
+ .conditionalByUrl("Patient?active=true")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoChangeWithMatch", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_NoMatch_InTransaction() {
+ Patient p = new Patient();
+ p.setActive(true);
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .conditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoMatch", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_WithMatchAndChange_InTransaction() {
+ createPatient(withActiveTrue());
+
+ Patient p = new Patient();
+ p.setActive(true);
+ p.addName().setFamily("Test");
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .conditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalWithMatch"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testConditionalUpdate_WithMatchNoChange_InTransaction() {
+ createPatient(withActiveTrue());
+
+ Patient p = new Patient();
+ p.setActive(true);
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionUpdateEntry(p)
+ .conditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Create {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulUpdateConditionalNoChangeWithMatch"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testPatch_WithChanges() {
+ createPatient(withId("A"), withActiveTrue());
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .patch()
+ .withFhirPatch(patch)
+ .withId("Patient/A")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatch", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testPatch_NoChanges() {
+ createPatient(withId("A"), withActiveFalse());
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .patch()
+ .withFhirPatch(patch)
+ .withId("Patient/A")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchNoChange", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+
+ @Test
+ public void testPatch_Conditional_MatchWithChanges() {
+ createPatient(withId("A"), withActiveTrue(), withBirthdate("2022-01-01"));
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .patch()
+ .withFhirPatch(patch)
+ .conditionalByUrl("Patient?birthdate=2022-01-01")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditional", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testPatch_Conditional_MatchNoChanges() {
+ createPatient(withId("A"), withActiveFalse(), withBirthdate("2022-01-01"));
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .patch()
+ .withFhirPatch(patch)
+ .conditionalByUrl("Patient?birthdate=2022-01-01")
+ .prefer(PreferReturnEnum.OPERATION_OUTCOME)
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditionalNoChange", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+
+ @Test
+ public void testPatch_WithChanges_InTransaction() {
+ createPatient(withId("A"), withActiveTrue());
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ BundleBuilder bb = new BundleBuilder(myFhirContext);
+ bb.addTransactionFhirPatchEntry(new IdType("Patient/A"), patch);
+
+ Bundle response = myClient
+ .transaction()
+ .withBundle((Bundle)bb.getBundle())
+ .execute();
+ OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatch"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testPatch_NoChanges_InTransaction() {
+ createPatient(withId("A"), withActiveFalse());
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ BundleBuilder bb = new BundleBuilder(myFhirContext);
+ bb.addTransactionFhirPatchEntry(new IdType("Patient/A"), patch);
+
+ Bundle response = myClient
+ .transaction()
+ .withBundle((Bundle)bb.getBundle())
+ .execute();
+ OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchNoChange"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+
+ @Test
+ public void testPatch_Conditional_MatchWithChanges_InTransaction() {
+ createPatient(withId("A"), withActiveTrue(), withBirthdate("2022-01-01"));
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ BundleBuilder bb = new BundleBuilder(myFhirContext);
+ bb.addTransactionFhirPatchEntry(patch).conditional("Patient?birthdate=2022-01-01");
+
+ Bundle response = myClient
+ .transaction()
+ .withBundle((Bundle)bb.getBundle())
+ .execute();
+ OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditional"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testPatch_Conditional_MatchNoChanges_InTransaction() {
+ createPatient(withId("A"), withActiveFalse(), withBirthdate("2022-01-01"));
+
+ Parameters patch = createPatchToSetPatientActiveFalse();
+
+ BundleBuilder bb = new BundleBuilder(myFhirContext);
+ bb.addTransactionFhirPatchEntry(patch).conditional("Patient?birthdate=2022-01-01");
+
+ Bundle response = myClient
+ .transaction()
+ .withBundle((Bundle)bb.getBundle())
+ .execute();
+ OperationOutcome oo = (OperationOutcome) response.getEntry().get(0).getResponse().getOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulPatchConditionalNoChange"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testMultiDelete_NoneFound() {
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .delete()
+ .resourceConditionalByUrl("Patient?active=true")
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("unableToDeleteNotFound"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testMultiDelete_SomeFound() {
+
+ createPatient(withActiveTrue());
+ createPatient(withActiveTrue());
+ createPatient(withActiveTrue());
+
+ OperationOutcome oo = (OperationOutcome) myClient
+ .delete()
+ .resourceConditionalByUrl("Patient?active=true")
+ .execute()
+ .getOperationOutcome();
+ ourLog.info("Update: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
+ @Test
+ public void testMultiDelete_SomeFound_InTransaction() {
+ createPatient(withActiveTrue());
+ createPatient(withActiveTrue());
+ createPatient(withActiveTrue());
+
+ Bundle input = (Bundle) new BundleBuilder(myFhirContext)
+ .addTransactionDeleteEntryConditional("Patient?active=true")
+ .andThen()
+ .getBundle();
+ Bundle output = myClient
+ .transaction()
+ .withBundle(input)
+ .execute();
+ ourLog.info("Delete {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
+ OperationOutcome oo = (OperationOutcome) output.getEntry().get(0).getResponse().getOutcome();
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), matchesHapiMessage("successfulDeletes", "successfulTimingSuffix"));
+ assertEquals(StorageResponseCodeEnum.SUCCESSFUL_DELETE.name(), oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode());
+ assertEquals(StorageResponseCodeEnum.SYSTEM, oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem());
+
+ }
+
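+ /**
+ * Builds a FHIR Patch Parameters resource with a single "replace" operation
+ * that sets Patient.active to false. Shared by the patch tests above.
+ */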
+ private static Parameters createPatchToSetPatientActiveFalse() {
+ Parameters patch = new Parameters();
+ Parameters.ParametersParameterComponent op = patch.addParameter().setName("operation");
+ op.addPart().setName("type").setValue(new CodeType("replace"));
+ op.addPart().setName("path").setValue(new CodeType("Patient.active"));
+ op.addPart().setName("value").setValue(new BooleanType(false));
+ return patch;
+ }
+
+
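+ /**
+ * Builds a Hamcrest matcher from one or more localized HAPI message keys: each
+ * message template is resolved through the localizer, regex metacharacters are
+ * escaped, and {0}-style placeholders are replaced with ".*" so the diagnostics
+ * string matches regardless of the substituted values.
+ */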
+ private Matcher<String> matchesHapiMessage(String... theMessageKey) {
+ StringBuilder joinedPattern = new StringBuilder();
+
+ for (var next : theMessageKey) {
+ String qualifiedKey = BaseStorageDao.class.getName() + "." + next;
+ String pattern = myFhirContext.getLocalizer().getFormatString(qualifiedKey);
+ assertTrue(isNotBlank(pattern));
+ pattern = pattern
+ .replace("\"", "\\\"")
+ .replace("(", "\\(")
+ .replace(")", "\\)")
+ .replace("[", "\\[")
+ .replace("]", "\\]")
+ .replace(".", "\\.")
+ .replaceAll("\\{[0-9]+}", ".*");
+
+ if (joinedPattern.length() > 0) {
+ joinedPattern.append(' ');
+ }
+ joinedPattern.append(pattern);
+
+ }
+
+ return matchesPattern(joinedPattern.toString());
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index 98355bd7caa..97b7a4f0b30 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -1,7 +1,9 @@
package ca.uhn.fhir.jpa.provider.r4;
+import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
@@ -14,6 +16,7 @@ import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
+import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -46,6 +49,7 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
+import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
@@ -69,6 +73,7 @@ import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
+import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IAnyResource;
@@ -165,7 +170,6 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
-import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
@@ -263,6 +267,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
public void before() throws Exception {
super.before();
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
+ HapiLocalizer.setOurFailOnMissingMessage(true);
myDaoConfig.setAllowMultipleDelete(true);
myClient.registerInterceptor(myCapturingInterceptor);
@@ -292,7 +297,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
- public void createResourceSearchParameter_withExpressionMetaSecurity_succeeds(){
+ public void createResourceSearchParameter_withExpressionMetaSecurity_succeeds() {
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId("resource-security");
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
@@ -310,7 +315,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
- public void createSearchParameter_with2Expressions_succeeds(){
+ public void createSearchParameter_with2Expressions_succeeds() {
SearchParameter searchParameter = new SearchParameter();
@@ -320,7 +325,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
searchParameter.setType(Enumerations.SearchParamType.TOKEN);
searchParameter.setExpression("Patient.gender|Person.gender");
- MethodOutcome result= myClient.create().resource(searchParameter).execute();
+ MethodOutcome result = myClient.create().resource(searchParameter).execute();
assertEquals(true, result.getCreated());
@@ -757,7 +762,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
-
@Test
public void testCreateWithNoBody() throws IOException {
@@ -817,7 +821,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
-
@BeforeEach
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
@@ -831,7 +834,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(200, resp.getStatusLine().getStatusCode());
}
-
private ArrayList<IBaseResource> genResourcesOfType(Bundle theRes, Class<? extends IBaseResource> theClass) {
ArrayList<IBaseResource> retVal = new ArrayList<>();
for (BundleEntryComponent next : theRes.getEntry()) {
@@ -974,7 +976,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
-
@Test
public void testCreateAndReadBackResourceWithContainedReferenceToContainer() {
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
@@ -1039,7 +1040,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals("#", loc.getManagingOrganization().getReference());
}
-
@Test
public void testCountParam() {
List<IBaseResource> resources = new ArrayList<>();
@@ -1099,7 +1099,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertNull(p.getBirthDate());
}
-
/**
* See #438
*/
@@ -1648,7 +1647,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String encoded = myFhirContext.newXmlParser().encodeResourceToString(response.getOperationOutcome());
ourLog.info(encoded);
assertThat(encoded, containsString(
- "
"));
+ ""
+ ));
}
}
@@ -1711,7 +1711,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
-
@Test
@Disabled
public void testQuery() throws IOException {
@@ -1752,7 +1751,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s). Took"));
} finally {
response.close();
}
@@ -1779,7 +1778,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
OperationOutcome oo = myFhirContext.newXmlParser().parseResource(OperationOutcome.class, resp);
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL \"Patient?name=testDeleteResourceConditional1\". Deletion failed."));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Unable to find resource matching URL"));
} finally {
response.close();
}
@@ -1852,17 +1851,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
- @Test
- public void testDeleteReturnsOperationOutcome() {
- Patient p = new Patient();
- p.addName().setFamily("FAM");
- IIdType id = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
-
- MethodOutcome resp = myClient.delete().resourceById(id).execute();
- OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
- }
-
@Test
public void testDeleteNonExistingResourceReturnsOperationOutcome() {
String resourceType = "Patient";
@@ -1881,7 +1869,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
MethodOutcome resp = myClient.delete().resourceById(id).execute();
OperationOutcome oo = (OperationOutcome) resp.getOperationOutcome();
- assertThat(oo.getIssueFirstRep().getDiagnostics(), startsWith("Successfully deleted 1 resource(s) in "));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), containsString("Successfully deleted 1 resource(s)."));
+ assertThat(oo.getIssueFirstRep().getDiagnostics(), containsString("Took "));
resp = myClient.delete().resourceById(id).execute();
oo = (OperationOutcome) resp.getOperationOutcome();
@@ -2349,7 +2338,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertThat(ids, not(containsInRelativeOrder(c3Id)));
}
-
@Test
public void testEverythingPatientTypeWithIdParameter() {
String methodName = "testEverythingPatientTypeWithIdParameter";
@@ -2967,7 +2955,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
-
@Test
public void testValidateResourceContainingProfileDeclarationDoesntResolve() throws IOException {
Observation input = new Observation();
@@ -2988,7 +2975,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
-
@SuppressWarnings("unused")
@Test
public void testFullTextSearch() throws Exception {
@@ -3397,31 +3383,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
- // private void delete(String theResourceType, String theParamName, String theParamValue) {
- // Bundle resources;
- // do {
- // IQuery forResource = ourClient.search().forResource(theResourceType);
- // if (theParamName != null) {
- // forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
- // }
- // resources = forResource.execute();
- // for (IResource next : resources.toListOfResources()) {
- // ourLog.info("Deleting resource: {}", next.getId());
- // ourClient.delete().resource(next).execute();
- // }
- // } while (resources.size() > 0);
- // }
- //
- // private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
- // {
- // Bundle resources = ourClient.search().forResource(theResourceType).where(new
- // TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
- // for (IResource next : resources.toListOfResources()) {
- // ourLog.info("Deleting resource: {}", next.getId());
- // ourClient.delete().resource(next).execute();
- // }
- // }
-
@Test
public void testIdAndVersionInBodyForCreate() throws IOException {
String methodName = "testIdAndVersionInBodyForCreate";
@@ -3464,6 +3425,31 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
+ // private void delete(String theResourceType, String theParamName, String theParamValue) {
+ // Bundle resources;
+ // do {
+ // IQuery forResource = ourClient.search().forResource(theResourceType);
+ // if (theParamName != null) {
+ // forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
+ // }
+ // resources = forResource.execute();
+ // for (IResource next : resources.toListOfResources()) {
+ // ourLog.info("Deleting resource: {}", next.getId());
+ // ourClient.delete().resource(next).execute();
+ // }
+ // } while (resources.size() > 0);
+ // }
+ //
+ // private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
+ // {
+ // Bundle resources = ourClient.search().forResource(theResourceType).where(new
+ // TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
+ // for (IResource next : resources.toListOfResources()) {
+ // ourLog.info("Deleting resource: {}", next.getId());
+ // ourClient.delete().resource(next).execute();
+ // }
+ // }
+
@Test
public void testIdAndVersionInBodyForUpdate() throws IOException {
String methodName = "testIdAndVersionInBodyForUpdate";
@@ -4190,7 +4176,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals("hugs", enc.getReasonCodeFirstRep().getCodingFirstRep().getCode());
}
-
@Test
public void testTerminologyWithCompleteCs_SearchForConceptIn() throws Exception {
@@ -5093,7 +5078,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(2, ids.size());
}
-
@Test
public void testSearchWithNormalizedQuantitySearchSupported_DegreeFahrenheit() throws Exception {
@@ -5244,7 +5228,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
-
@Test
public void testSearchReusesResultsDisabled() {
List<IBaseResource> resources = new ArrayList<>();
@@ -5863,7 +5846,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(oid2, list.get(3));
}
-
@Test
public void testSearchWithMissing() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
@@ -7475,7 +7457,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
// ensure the patient has the expected overall history
Bundle result = myClient.history()
- .onInstance("Patient/"+patientId)
+ .onInstance("Patient/" + patientId)
.returnBundle(Bundle.class)
.execute();
@@ -7508,8 +7490,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size());
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1"));
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
private void verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@@ -7518,17 +7500,17 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.after(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(1, resultIds.size());
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
private void verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
- Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, - delayInMs);
+ Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs);
assertTrue(timeBetweenUpdates.before(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size());
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1"));
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
private void verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@@ -7537,7 +7519,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates));
assertEquals(1, resultIds.size());
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
@@ -7549,13 +7531,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
private void verifySinceBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
- Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, - delayInMs);
+ Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs);
assertTrue(timeBetweenUpdates.before(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates));
assertEquals(2, resultIds.size());
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/1"));
- assertTrue(resultIds.contains("Patient/"+ patientId +"/_history/2"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/1"));
+ assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
@Test
@@ -7686,7 +7668,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
return new InstantDt(theDate).getValueAsString();
}
-
public IIdType createPatientWithIndexAtOrganization(String theMethodName, String theIndex, IIdType theOrganizationId) {
Patient p1 = new Patient();
p1.addName().setFamily(theMethodName + theIndex);
@@ -7728,39 +7709,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@Nested
public class MissingSearchParameterTests {
- private interface XtoY<X, Y> {
- Y doTask(X theInput);
- }
-
- private static class MissingSearchTestParameters {
- /**
- * The setting for IndexMissingFields
- */
- public final DaoConfig.IndexEnabledEnum myEnableMissingFieldsValue;
-
- /**
- * Whether to use :missing=true/false
- */
- public final boolean myIsMissing;
-
- /**
- * Whether or not the field is populated or not.
- * True -> populate field.
- * False -> not populated
- */
- public final boolean myIsValuePresentOnResource;
-
- public MissingSearchTestParameters(
- DaoConfig.IndexEnabledEnum theEnableMissingFields,
- boolean theIsMissing,
- boolean theHasField
- ) {
- myEnableMissingFieldsValue = theEnableMissingFields;
- myIsMissing = theIsMissing;
- myIsValuePresentOnResource = theHasField;
- }
- }
-
private IParser myParser;
@BeforeEach
@@ -7827,30 +7775,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
//@formatter:on
}
- /**
- * The method that generates parameters for tests
- */
- private static Stream<Arguments> provideParameters() {
- return Stream.of(
- // 1
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, true)),
- // 2
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, false)),
- // 3
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, true)),
- // 4
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, false)),
- // 5
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, true)),
- // 6
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, true)),
- // 7
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, false)),
- // 8
- Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, false))
- );
- }
-
/**
* Runs the actual test for whichever search parameter and given inputs we want.
*/
@@ -8036,6 +7960,63 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
);
});
}
+
+ private interface XtoY<X, Y> {
+ Y doTask(X theInput);
+ }
+
+ private static class MissingSearchTestParameters {
+ /**
+ * The setting for IndexMissingFields
+ */
+ public final DaoConfig.IndexEnabledEnum myEnableMissingFieldsValue;
+
+ /**
+ * Whether to use :missing=true/false
+ */
+ public final boolean myIsMissing;
+
+ /**
+ * Whether the field is populated on the resource:
+ * true -> the field is populated;
+ * false -> the field is not populated.
+ */
+ public final boolean myIsValuePresentOnResource;
+
+ public MissingSearchTestParameters(
+ DaoConfig.IndexEnabledEnum theEnableMissingFields,
+ boolean theIsMissing,
+ boolean theHasField
+ ) {
+ myEnableMissingFieldsValue = theEnableMissingFields;
+ myIsMissing = theIsMissing;
+ myIsValuePresentOnResource = theHasField;
+ }
+ }
+
+ /**
+ * The method that generates parameters for tests
+ */
+ private static Stream<Arguments> provideParameters() {
+ return Stream.of(
+ // 1
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, true)),
+ // 2
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, false)),
+ // 3
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, false, true)),
+ // 4
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.ENABLED, true, false)),
+ // 5
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, true)),
+ // 6
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, true)),
+ // 7
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, true, false)),
+ // 8
+ Arguments.of(new MissingSearchTestParameters(DaoConfig.IndexEnabledEnum.DISABLED, false, false))
+ );
+ }
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
index 1bc194ce46d..88096d22434 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
@@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
@@ -142,6 +143,8 @@ public class GiantTransactionPerfTest {
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
@Mock
private IIdHelperService myIdHelperService;
+ @Mock
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
@AfterEach
public void afterEach() {
@@ -192,7 +195,6 @@ public class GiantTransactionPerfTest {
mySystemDao = new FhirSystemDaoR4();
mySystemDao.setTransactionProcessorForUnitTest(myTransactionProcessor);
mySystemDao.setDaoConfigForUnitTest(myDaoConfig);
- mySystemDao.setPartitionSettingsForUnitTest(myPartitionSettings);
mySystemDao.start();
when(myAppCtx.getBean(eq(IInstanceValidatorModule.class))).thenReturn(myInstanceValidatorSvc);
@@ -265,6 +267,7 @@ public class GiantTransactionPerfTest {
myEobDao.setDaoConfigForUnitTest(myDaoConfig);
myEobDao.setIdHelperSvcForUnitTest(myIdHelperService);
myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings);
+ myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
myEobDao.start();
myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ITermReadSvcTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ITermReadSvcTest.java
index 3c9a0ee995b..a9610ed7f54 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ITermReadSvcTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ITermReadSvcTest.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -85,6 +86,8 @@ class ITermReadSvcTest {
private DaoRegistry myDaoRegistry;
@Mock
private IFhirResourceDao myFhirResourceDao;
+ @Mock
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
@Nested
@@ -93,6 +96,7 @@ class ITermReadSvcTest {
@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myTermValueSetDao", myTermValueSetDao);
+ ReflectionTestUtils.setField(testedClass, "myJpaStorageResourceParser", myJpaStorageResourceParser);
}
@Test
@@ -214,6 +218,7 @@ class ITermReadSvcTest {
@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myEntityManager", myEntityManager);
+ ReflectionTestUtils.setField(testedClass, "myJpaStorageResourceParser", myJpaStorageResourceParser);
}
@@ -245,13 +250,13 @@ class ITermReadSvcTest {
when(myEntityManager.createQuery(anyString()).getResultList())
.thenReturn(Lists.newArrayList(resource1));
when(myDaoRegistry.getResourceDao("CodeSystem")).thenReturn(myFhirResourceDao);
- when(myFhirResourceDao.toResource(resource1, false)).thenReturn(myCodeSystemResource);
+ when(myJpaStorageResourceParser.toResource(resource1, false)).thenReturn(myCodeSystemResource);
testedClass.readCodeSystemByForcedId("a-cs-id");
- verify(myFhirResourceDao, times(1)).toResource(any(), eq(false));
+ verify(myJpaStorageResourceParser, times(1)).toResource(any(), eq(false));
}
}
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index 77e75be98f6..c9a374220a0 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index d0c8c2d8971..646ea335bc8 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index 2fb8f88c176..7c61d62309d 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
index 95aa4261ed8..de2b7741bd1 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
@@ -535,12 +535,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
myPartitionSettings.setPartitioningEnabled(false);
}
- @Order(Integer.MIN_VALUE)
- @BeforeEach
- public void beforeResetInterceptors() {
- // nothing
- }
-
@Override
@Order(Integer.MAX_VALUE)
@AfterEach
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java
index a0385253d91..267e84de563 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java
@@ -146,8 +146,8 @@ public class PatientReindexTestHelper {
patient.getNameFirstRep().setFamily("Family-"+i).addGiven("Given-"+i);
patient.getIdentifierFirstRep().setValue("Id-"+i);
myPatientDao.create(patient, requestDetails);
+ TestUtil.sleepOneClick();
}
- TestUtil.sleepOneClick();
}
private void validatePersistedPatients(int theExpectedNumPatients, long theExpectedVersion) {
diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java
index 1a8877d9ccc..6a3e342753e 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/LoincFullLoadR4SandboxIT.java
@@ -2,6 +2,8 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
+import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
+import ca.uhn.fhir.jpa.dao.IStorageResourceParser;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@@ -192,6 +194,8 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
private int askAtOrderEntryCount = 0;
private int validatedPropertiesCounter = 0;
private int validatedMapToEntriesCounter = 0;
+ @Autowired
+ private IJpaStorageResourceParser myJpaStorageResourceParser;
@BeforeEach
void setUp() {
@@ -606,7 +610,7 @@ public class LoincFullLoadR4SandboxIT extends BaseJpaTest {
List vsList = (List) q1.getResultList();
assertEquals(1, vsList.size());
long vsLongId = vsList.get(0).getId();
- ValueSet vs = (ValueSet) myValueSetDao.toResource(vsList.get(0), false);
+ ValueSet vs = (ValueSet) myJpaStorageResourceParser.toResource(vsList.get(0), false);
assertNotNull(vs);
Query q2 = myEntityManager.createQuery("from TermValueSet where myResource = " + vsLongId);
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index e0a9b637dfc..89e64e9ff83 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index 74f23de9987..3850ac69145 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index 6a3f7b9d826..bfc77612bd4 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index 4b6c5c3cbe0..51cd8dc0cb2 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.3.1-SNAPSHOT</version>
+ <version>6.3.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
index 12b39e4c71e..f08bcd7f209 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
@@ -63,6 +63,7 @@ public class TransactionDetails {
private Map myUserData;
private ListMultimap myDeferredInterceptorBroadcasts;
private EnumSet