diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
index dd11be7a8d1..a5b61eacef2 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
@@ -6,6 +6,9 @@ import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
+import ca.uhn.fhir.rest.server.Constants;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+
/*
* #%L
* HAPI FHIR - Core Library
@@ -158,5 +161,92 @@ public class UrlUtil {
throw new Error("UTF-8 not supported on this platform");
}
}
+
+ //@formatter:off
+ /**
+ * Parse a URL in one of the following forms:
+ *
+ * - [Resource Type]?[Search Params]
+ *
+ * - [Resource Type]/[Resource ID]
+ *
+ * - [Resource Type]/[Resource ID]/_history/[Version ID]
+ *
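+ * For example (illustrative inputs only, not taken from this change):
+ *
+ * - "Patient?name=smith" gives resourceType "Patient" and params "name=smith"
+ *
+ * - "Patient/123" gives resourceType "Patient" and resourceId "123"
+ *
+ * - "Patient/123/_history/2" gives resourceType "Patient", resourceId "123" and versionId "2"
+ *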
+ */
+ //@formatter:on
+ public static UrlParts parseUrl(String theUrl) {
+ UrlParts retVal = new UrlParts();
+
+ int nextStart = 0;
+ boolean nextIsHistory = false;
+
+ for (int idx = 0; idx < theUrl.length(); idx++) {
+ char nextChar = theUrl.charAt(idx);
+ boolean atEnd = (idx + 1) == theUrl.length();
+ if (nextChar == '?' || nextChar == '/' || atEnd) {
+ int endIdx = atEnd ? idx + 1 : idx;
+ String nextSubstring = theUrl.substring(nextStart, endIdx);
+ if (retVal.getResourceType() == null) {
+ retVal.setResourceType(nextSubstring);
+ } else if (retVal.getResourceId() == null) {
+ retVal.setResourceId(nextSubstring);
+ } else if (nextIsHistory) {
+ retVal.setVersionId(nextSubstring);
+ } else {
+ if (nextSubstring.equals(Constants.URL_TOKEN_HISTORY)) {
+ nextIsHistory = true;
+ } else {
+ throw new InvalidRequestException("Invalid FHIR resource URL: " + theUrl);
+ }
+ }
+ if (nextChar == '?') {
+ if (theUrl.length() > idx + 1) {
+ retVal.setParams(theUrl.substring(idx + 1, theUrl.length()));
+ }
+ break;
+ }
+ nextStart = idx + 1;
+ }
+ }
+
+ return retVal;
+ }
+
+ public static class UrlParts {
+ private String myParams;
+ private String myResourceId;
+ private String myResourceType;
+ private String myVersionId;
+
+ public String getParams() {
+ return myParams;
+ }
+
+ public String getResourceId() {
+ return myResourceId;
+ }
+
+ public String getResourceType() {
+ return myResourceType;
+ }
+
+ public String getVersionId() {
+ return myVersionId;
+ }
+
+ public void setParams(String theParams) {
+ myParams = theParams;
+ }
+
+ public void setResourceId(String theResourceId) {
+ myResourceId = theResourceId;
+ }
+
+ public void setResourceType(String theResourceType) {
+ myResourceType = theResourceType;
+ }
+
+ public void setVersionId(String theVersionId) {
+ myVersionId = theVersionId;
+ }
+ }
}
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 9237ebd9ce4..df590b1415e 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -284,72 +284,6 @@
alphabetical
-
- de.juplo
- hibernate4-maven-plugin
-
- true
- SCRIPT
- ${skip-hib4}
-
-
-
-
- o10g
-
- export
-
- test
-
- org.hibernate.dialect.Oracle10gDialect
- ${project.build.directory}/schema_oracle_10g.sql
-
-
-
- derby
-
- export
-
- test
-
- org.hibernate.dialect.DerbyTenSevenDialect
- ${project.build.directory}/schema_derby.sql
-
-
-
- hsql
-
- export
-
- test
-
- org.hibernate.dialect.HSQLDialect
- ${project.build.directory}/schema_hsql.sql
-
-
-
- mysql5
-
- export
-
- test
-
- org.hibernate.dialect.MySQL5Dialect
- ${project.build.directory}/schema_mysql_5.sql
-
-
-
-
ca.uhn.hapi.fhir
hapi-tinder-plugin
@@ -427,6 +361,79 @@
true
+
+ DIST
+
+
+
+ de.juplo
+ hibernate4-maven-plugin
+
+ true
+ SCRIPT
+ ${skip-hib4}
+
+
+
+
+ o10g
+
+ export
+
+ test
+
+ org.hibernate.dialect.Oracle10gDialect
+ ${project.build.directory}/schema_oracle_10g.sql
+
+
+
+ derby
+
+ export
+
+ test
+
+ org.hibernate.dialect.DerbyTenSevenDialect
+ ${project.build.directory}/schema_derby.sql
+
+
+
+ hsql
+
+ export
+
+ test
+
+ org.hibernate.dialect.HSQLDialect
+ ${project.build.directory}/schema_hsql.sql
+
+
+
+ mysql5
+
+ export
+
+ test
+
+ org.hibernate.dialect.MySQL5Dialect
+ ${project.build.directory}/schema_mysql_5.sql
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index f0c4503a175..21f7c0866ba 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -87,6 +87,7 @@ import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
+import ca.uhn.fhir.jpa.entity.SubscriptionCandidateResource;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.util.StopWatch;
@@ -520,7 +521,7 @@ public abstract class BaseHapiFhirDao implements IDao {
}
throw e;
}
- IResource resource = (IResource) toResource(type.getImplementingClass(), next);
+ IResource resource = (IResource) toResource(type.getImplementingClass(), next, true);
retVal.add(resource);
}
return retVal;
@@ -558,12 +559,6 @@ public abstract class BaseHapiFhirDao implements IDao {
}
protected void populateResourceIntoEntity(IResource theResource, ResourceTable theEntity) {
-
- if (theEntity.getPublished().isEmpty()) {
- theEntity.setPublished(new Date());
- }
- theEntity.setUpdated(new Date());
-
theEntity.setResourceType(toResourceName(theResource));
List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
@@ -936,13 +931,13 @@ public abstract class BaseHapiFhirDao implements IDao {
return retVal;
}
- protected IBaseResource toResource(BaseHasResource theEntity) {
+ protected IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
- return toResource(type.getImplementingClass(), theEntity);
+ return toResource(type.getImplementingClass(), theEntity, theForHistoryOperation);
}
@SuppressWarnings("unchecked")
- protected <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity) {
+ protected <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation) {
String resourceText = null;
switch (theEntity.getEncoding()) {
case JSON:
@@ -983,9 +978,23 @@ public abstract class BaseHapiFhirDao implements IDao {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
retVal = (R) res;
ResourceMetadataKeyEnum.DELETED_AT.put(res, new InstantDt(theEntity.getDeleted()));
- ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
+ if (theForHistoryOperation) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.DELETE);
+ }
+ } else if (theForHistoryOperation) {
+ /*
+ * If the create and update times match, this was when the resource was created
+ * so we should mark it as a POST. Otherwise, it's a PUT.
+ */
+ Date published = theEntity.getPublished().getValue();
+ Date updated = theEntity.getUpdated().getValue();
+ if (published.equals(updated)) {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.POST);
+ } else {
+ ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(res, BundleEntryTransactionMethodEnum.PUT);
+ }
}
-
+
res.setId(theEntity.getIdDt());
ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
@@ -1063,25 +1072,28 @@ public abstract class BaseHapiFhirDao implements IDao {
}
}
- protected ResourceTable updateEntity(final IResource theResource, ResourceTable entity, boolean theUpdateHistory, Date theDeletedTimestampOrNull) {
- return updateEntity(theResource, entity, theUpdateHistory, theDeletedTimestampOrNull, true, true);
+ protected ResourceTable updateEntity(final IResource theResource, ResourceTable entity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, Date theUpdateTime) {
+ return updateEntity(theResource, entity, theUpdateHistory, theDeletedTimestampOrNull, true, true, theUpdateTime);
}
@SuppressWarnings("unchecked")
- protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion) {
-
- if (theEntity.getPublished() == null) {
- theEntity.setPublished(new Date());
- }
-
+ protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion, Date theUpdateTime) {
+
+ /*
+ * This should be the very first thing..
+ */
if (theResource != null) {
- validateResourceForStorage((T) theResource);
+ validateResourceForStorage((T) theResource, theEntity);
String resourceType = myContext.getResourceDefinition(theResource).getName();
if (isNotBlank(theEntity.getResourceType()) && !theEntity.getResourceType().equals(resourceType)) {
throw new UnprocessableEntityException("Existing resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + theEntity.getResourceType() + "] - Cannot update with [" + resourceType + "]");
}
}
+ if (theEntity.getPublished() == null) {
+ theEntity.setPublished(theUpdateTime);
+ }
+
if (theUpdateHistory) {
final ResourceHistoryTable historyEntry = theEntity.toHistory();
myEntityManager.persist(historyEntry);
@@ -1159,7 +1171,7 @@ public abstract class BaseHapiFhirDao implements IDao {
links = extractResourceLinks(theEntity, theResource);
populateResourceIntoEntity(theResource, theEntity);
- theEntity.setUpdated(new Date());
+ theEntity.setUpdated(theUpdateTime);
theEntity.setLanguage(theResource.getLanguage().getValue());
theEntity.setParamsString(stringParams);
theEntity.setParamsStringPopulated(stringParams.isEmpty() == false);
@@ -1178,11 +1190,11 @@ public abstract class BaseHapiFhirDao implements IDao {
theEntity.setResourceLinks(links);
theEntity.setHasLinks(links.isEmpty() == false);
theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
-
+
} else {
populateResourceIntoEntity(theResource, theEntity);
- theEntity.setUpdated(new Date());
+ theEntity.setUpdated(theUpdateTime);
theEntity.setLanguage(theResource.getLanguage().getValue());
theEntity.setIndexStatus(null);
@@ -1197,8 +1209,24 @@ public abstract class BaseHapiFhirDao implements IDao {
myEntityManager.persist(theEntity.getForcedId());
}
+ postPersist(theEntity, (T) theResource);
+
} else {
theEntity = myEntityManager.merge(theEntity);
+
+ postUpdate(theEntity, (T) theResource);
+ }
+
+ /*
+ * When subscriptions are enabled, each time we store a resource we also
+ * store a subscription candidate. These candidates are examined by the
+ * subscription module and then deleted.
+ */
+ if (myConfig.isSubscriptionEnabled() && thePerformIndexing) {
+ SubscriptionCandidateResource candidate = new SubscriptionCandidateResource();
+ candidate.setResource(theEntity);
+ candidate.setResourceVersion(theEntity.getVersion());
+ myEntityManager.persist(candidate);
}
if (thePerformIndexing) {
@@ -1289,6 +1317,30 @@ public abstract class BaseHapiFhirDao implements IDao {
return theEntity;
}
+ /**
+ * Subclasses may override to provide behaviour. Called when an existing resource has been
+ * updated in the database.
+ *
+ * @param theEntity
+ * The resource
+ * @param theResource The resource being persisted
+ */
+ protected void postUpdate(ResourceTable theEntity, T theResource) {
+ // nothing
+ }
+
+ /**
+ * Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the
+ * first time.
+ *
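+ * <p>
+ * For example (an illustrative sketch only, not part of this change), a subclass bound to
+ * Patient could log newly created resources:
+ * <pre>
+ * protected void postPersist(ResourceTable theEntity, Patient theResource) {
+ *    ourLog.info("Created resource with PID {}", theEntity.getId());
+ * }
+ * </pre>
+ *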
+ * @param theEntity
+ * The resource
+ * @param theResource The resource being persisted
+ */
+ protected void postPersist(ResourceTable theEntity, T theResource) {
+ // nothing
+ }
+
/**
* This method is invoked immediately before storing a new resource, or an update to an existing resource to allow
* the DAO to ensure that it is valid for persistence. By default, checks for the "subsetted" tag and rejects
@@ -1296,8 +1348,9 @@ public abstract class BaseHapiFhirDao implements IDao {
*
* @param theResource
* The resource that is about to be persisted
+ * @param theEntityToSave TODO
*/
- protected void validateResourceForStorage(T theResource) {
+ protected void validateResourceForStorage(T theResource, ResourceTable theEntityToSave) {
IResource res = (IResource) theResource;
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(res);
if (tagList != null) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index bd9977b30e4..f3baeae53c2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -142,7 +142,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
- private EntityManager myEntityManager;
+ protected EntityManager myEntityManager;
@Autowired
private PlatformTransactionManager myPlatformTransactionManager;
@@ -1004,7 +1004,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
}
}
- return doCreate(theResource, theIfNoneExist, thePerformIndexing);
+ return doCreate(theResource, theIfNoneExist, thePerformIndexing, new Date());
}
private Predicate createCompositeParamPart(CriteriaBuilder builder, Root from, RuntimeSearchParam left, IQueryParameterType leftValue) {
@@ -1268,7 +1268,8 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, theId.getResourceType());
notifyInterceptors(RestOperationTypeEnum.DELETE, requestDetails);
- ResourceTable savedEntity = updateEntity(null, entity, true, new Date());
+ Date updateTime = new Date();
+ ResourceTable savedEntity = updateEntity(null, entity, true, updateTime, updateTime);
notifyWriteCompleted();
@@ -1298,14 +1299,15 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
notifyInterceptors(RestOperationTypeEnum.DELETE, requestDetails);
// Perform delete
- ResourceTable savedEntity = updateEntity(null, entity, true, new Date());
+ Date updateTime = new Date();
+ ResourceTable savedEntity = updateEntity(null, entity, true, updateTime, updateTime);
notifyWriteCompleted();
ourLog.info("Processed delete on {} in {}ms", theUrl, w.getMillisAndRestart());
return toMethodOutcome(savedEntity, null);
}
- private DaoMethodOutcome doCreate(T theResource, String theIfNoneExist, boolean thePerformIndexing) {
+ private DaoMethodOutcome doCreate(T theResource, String theIfNoneExist, boolean thePerformIndexing, Date theUpdateTime) {
StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource);
@@ -1346,7 +1348,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
ActionRequestDetails requestDetails = new ActionRequestDetails(theResource.getId(), toResourceName(theResource), theResource);
notifyInterceptors(RestOperationTypeEnum.CREATE, requestDetails);
- updateEntity(theResource, entity, false, null, thePerformIndexing, true);
+ updateEntity(theResource, entity, false, null, thePerformIndexing, true, theUpdateTime);
DaoMethodOutcome outcome = toMethodOutcome(entity, theResource).setCreated(true);
@@ -1419,7 +1421,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
try {
BaseHasResource entity = readEntity(theId.toVersionless(), false);
validateResourceType(entity);
- currentTmp = toResource(myResourceType, entity);
+ currentTmp = toResource(myResourceType, entity, true);
if (ResourceMetadataKeyEnum.UPDATED.get(currentTmp).after(end.getValue())) {
currentTmp = null;
}
@@ -1496,7 +1498,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
if (retVal.size() == maxResults) {
break;
}
- retVal.add(toResource(myResourceType, next));
+ retVal.add(toResource(myResourceType, next, true));
}
return retVal;
@@ -1527,7 +1529,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
return retVal;
}
- private void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids) {
+ private void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation) {
if (theIncludePids.isEmpty()) {
return;
}
@@ -1546,7 +1548,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
for (ResourceTable next : q.getResultList()) {
Class<? extends IBaseResource> resourceType = getContext().getResourceDefinition(next.getResourceType()).getImplementingClass();
- IResource resource = (IResource) toResource(resourceType, next);
+ IResource resource = (IResource) toResource(resourceType, next, theForHistoryOperation);
Integer index = position.get(next.getId());
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", next.getId());
@@ -1827,7 +1829,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
BaseHasResource entity = readEntity(theId);
validateResourceType(entity);
- T retVal = toResource(myResourceType, entity);
+ T retVal = toResource(myResourceType, entity, false);
InstantDt deleted = ResourceMetadataKeyEnum.DELETED_AT.get(retVal);
if (deleted != null && !deleted.isEmpty()) {
@@ -2065,7 +2067,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
// Execute the query and make sure we return distinct results
List<IBaseResource> retVal = new ArrayList<IBaseResource>();
- loadResourcesByPid(pidsSubList, retVal, revIncludedPids);
+ loadResourcesByPid(pidsSubList, retVal, revIncludedPids, false);
return retVal;
}
@@ -2381,7 +2383,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
if (resourceId.isIdPartValidLong()) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "failedToCreateWithClientAssignedNumericId", theResource.getId().getIdPart()));
}
- return doCreate(theResource, null, thePerformIndexing);
+ return doCreate(theResource, null, thePerformIndexing, new Date());
}
}
@@ -2398,7 +2400,7 @@ public abstract class BaseHapiFhirResourceDao extends BaseH
notifyInterceptors(RestOperationTypeEnum.UPDATE, requestDetails);
// Perform update
- ResourceTable savedEntity = updateEntity(theResource, entity, true, null, thePerformIndexing, true);
+ ResourceTable savedEntity = updateEntity(theResource, entity, true, null, thePerformIndexing, true, new Date());
notifyWriteCompleted();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
index dc93764ff3b..02749018d08 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
@@ -101,7 +101,7 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao
private List<IServerInterceptor> myInterceptors;
private ResourceEncodingEnum myResourceEncoding = ResourceEncodingEnum.JSONC;
+ private boolean mySubscriptionEnabled;
/**
* See {@link #setIncludeLimit(int)}
@@ -64,6 +65,13 @@ public class DaoConfig {
return myResourceEncoding;
}
+ /**
+ * See {@link #setSubscriptionEnabled(boolean)}
+ */
+ public boolean isSubscriptionEnabled() {
+ return mySubscriptionEnabled;
+ }
+
public void setHardSearchLimit(int theHardSearchLimit) {
myHardSearchLimit = theHardSearchLimit;
}
@@ -90,12 +98,12 @@ public class DaoConfig {
* ID).
*
*/
- public void setInterceptors(List<IServerInterceptor> theInterceptors) {
- myInterceptors = theInterceptors;
- }
-
- public void setResourceEncoding(ResourceEncodingEnum theResourceEncoding) {
- myResourceEncoding = theResourceEncoding;
+ public void setInterceptors(IServerInterceptor... theInterceptor) {
+ if (theInterceptor == null || theInterceptor.length==0){
+ setInterceptors(new ArrayList<IServerInterceptor>());
+ } else {
+ setInterceptors(Arrays.asList(theInterceptor));
+ }
}
/**
@@ -107,12 +115,23 @@ public class DaoConfig {
* ID).
*
*/
- public void setInterceptors(IServerInterceptor... theInterceptor) {
- if (theInterceptor == null || theInterceptor.length==0){
- setInterceptors(new ArrayList<IServerInterceptor>());
- } else {
- setInterceptors(Arrays.asList(theInterceptor));
- }
+ public void setInterceptors(List<IServerInterceptor> theInterceptors) {
+ myInterceptors = theInterceptors;
+ }
+
+ public void setResourceEncoding(ResourceEncodingEnum theResourceEncoding) {
+ myResourceEncoding = theResourceEncoding;
+ }
+
+ /**
+ * Does this server support subscriptions? If set to true, the server
+ * will enable the subscription monitoring mode, which adds a small amount of
+ * overhead. Note that if this is enabled, you must also enable
+ * Spring task scanning in your XML config so that the scheduled tasks
+ * used by the subscription module can run.
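+ * <p>
+ * A minimal sketch of enabling this (illustrative only, not part of this change):
+ * <pre>
+ * DaoConfig config = new DaoConfig();
+ * config.setSubscriptionEnabled(true);
+ * </pre>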
+ */
+ public void setSubscriptionEnabled(boolean theSubscriptionEnabled) {
+ mySubscriptionEnabled = theSubscriptionEnabled;
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoQuestionnaireResponseDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoQuestionnaireResponseDstu2.java
index 0daa1bc776c..2fa92c6f9d6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoQuestionnaireResponseDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoQuestionnaireResponseDstu2.java
@@ -27,6 +27,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.resource.Questionnaire;
import ca.uhn.fhir.model.dstu2.resource.QuestionnaireResponse;
@@ -58,8 +59,8 @@ public class FhirResourceDaoQuestionnaireResponseDstu2 extends FhirResourceDaoDs
}
@Override
- protected void validateResourceForStorage(QuestionnaireResponse theResource) {
- super.validateResourceForStorage(theResource);
+ protected void validateResourceForStorage(QuestionnaireResponse theResource, ResourceTable theEntityToSave) {
+ super.validateResourceForStorage(theResource, theEntityToSave);
if (!myValidateResponses) {
return;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
new file mode 100644
index 00000000000..32d72e2196f
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
@@ -0,0 +1,195 @@
+package ca.uhn.fhir.jpa.dao;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Set;
+
+import javax.persistence.Query;
+import javax.persistence.TypedQuery;
+
+import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
+
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
+import ca.uhn.fhir.jpa.entity.SubscriptionTable;
+import ca.uhn.fhir.model.api.IResource;
+import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+import ca.uhn.fhir.model.dstu2.resource.Subscription;
+import ca.uhn.fhir.model.dstu2.valueset.SubscriptionStatusEnum;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.parser.DataFormatException;
+import ca.uhn.fhir.rest.server.Constants;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.util.UrlUtil;
+import ca.uhn.fhir.util.UrlUtil.UrlParts;
+
+public class FhirResourceDaoSubscriptionDstu2 extends FhirResourceDaoDstu2<Subscription> implements IFhirResourceDaoSubscription {
+
+ private static final ResourceMetadataKeyEnum