Start work on expunge operation

jamesagnew 2018-04-22 15:29:57 -04:00
parent 8ec5ca69bb
commit 51216c0010
17 changed files with 668 additions and 174 deletions

View File

@ -143,6 +143,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
protected IFulltextSearchSvc myFulltextSearchSvc;
@Autowired()
protected IResourceIndexedSearchParamUriDao myResourceIndexedSearchParamUriDao;
@Autowired()
protected IResourceIndexedSearchParamStringDao myResourceIndexedSearchParamStringDao;
@Autowired()
protected IResourceIndexedSearchParamTokenDao myResourceIndexedSearchParamTokenDao;
@Autowired()
protected IResourceIndexedSearchParamDateDao myResourceIndexedSearchParamDateDao;
@Autowired()
protected IResourceIndexedSearchParamQuantityDao myResourceIndexedSearchParamQuantityDao;
@Autowired()
protected IResourceIndexedSearchParamCoordsDao myResourceIndexedSearchParamCoordsDao;
@Autowired()
protected IResourceIndexedSearchParamNumberDao myResourceIndexedSearchParamNumberDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@Autowired

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -24,15 +24,13 @@ import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
import ca.uhn.fhir.model.api.*;
@ -47,15 +45,15 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerOperationInterceptor;
import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import com.google.common.annotations.VisibleForTesting;
import ca.uhn.fhir.util.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.SliceImpl;
import org.springframework.lang.NonNull;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
@ -70,6 +68,7 @@ import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import javax.servlet.http.HttpServletResponse;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -90,6 +89,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private IResourceHistoryTagDao myResourceHistoryTagDao;
@Autowired
private IResourceTagDao myResourceTagDao;
@Autowired
private IResourceLinkDao myResourceLinkDao;
private String myResourceName;
private Class<T> myResourceType;
@ -359,7 +362,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public DeleteMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequestDetails) {
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
List<DeleteConflict> deleteConflicts = new ArrayList<>();
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequestDetails);
@ -467,6 +470,63 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return outcome;
}
private ExpungeOutcome doExpunge(Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
AtomicInteger remainingCount = new AtomicInteger(theExpungeOptions.getLimit());
if (theExpungeOptions.isExpungeDeletedResources() && theVersion == null) {
/*
* Delete historical versions of deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> resourceIds;
if (theResourceId != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, getResourceName());
} else {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, getResourceName());
}
for (Long next : resourceIds) {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
}
/*
* Delete current versions of deleted resources
*/
for (Long next : resourceIds) {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
}
}
if (theExpungeOptions.isExpungeOldVersions()) {
/*
* Delete historical versions of non-deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> historicalIds;
if (theResourceId != null && theVersion != null) {
historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, getResourceName());
}
for (Long next : historicalIds) {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
}
}
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
private <MT extends IBaseMetaType> void doMetaAdd(MT theMetaAdd, BaseHasResource entity) {
List<TagDefinition> tags = toTagList(theMetaAdd);
@ -523,6 +583,80 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
myEntityManager.merge(entity);
}
@Override
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
BaseHasResource entity = readEntity(theId);
if (theId.hasVersionIdPart()) {
BaseHasResource currentVersion = readEntity(theId.toVersionless());
if (entity.getVersion() == currentVersion.getVersion()) {
throw new PreconditionFailedException("Can not perform version-specific expunge of resource " + theId.toUnqualified().getValue() + " as this is the current version");
}
return doExpunge(entity.getResourceId(), entity.getVersion(), theExpungeOptions);
}
return doExpunge(entity.getResourceId(), null, theExpungeOptions);
}
@Override
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
return doExpunge(null, null, theExpungeOptions);
}
private void expungeCurrentVersionOfResource(Long theResourceId) {
ResourceTable resource = myResourceTableDao.findOne(theResourceId);
ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion());
expungeHistoricalVersion(currentVersion.getId());
ourLog.info("Deleting current version of resource {}", resource.getIdDt().getValue());
myResourceIndexedSearchParamUriDao.delete(resource.getParamsUri());
myResourceIndexedSearchParamCoordsDao.delete(resource.getParamsCoords());
myResourceIndexedSearchParamDateDao.delete(resource.getParamsDate());
myResourceIndexedSearchParamNumberDao.delete(resource.getParamsNumber());
myResourceIndexedSearchParamQuantityDao.delete(resource.getParamsQuantity());
myResourceIndexedSearchParamStringDao.delete(resource.getParamsString());
myResourceIndexedSearchParamTokenDao.delete(resource.getParamsToken());
myResourceTagDao.delete(resource.getTags());
resource.getTags().clear();
if (resource.getForcedId() != null) {
ForcedId forcedId = resource.getForcedId();
resource.setForcedId(null);
myResourceTableDao.saveAndFlush(resource);
myForcedIdDao.delete(forcedId);
}
myResourceTableDao.delete(resource);
}
protected void expungeHistoricalVersion(Long theNextVersionId) {
ResourceHistoryTable version = myResourceHistoryTableDao.findOne(theNextVersionId);
ourLog.info("Deleting resource version {}", version.getIdDt().getValue());
myResourceHistoryTagDao.delete(version.getTags());
myResourceHistoryTableDao.delete(version);
}
protected void expungeHistoricalVersionsOfId(Long theResourceId, AtomicInteger theRemainingCount) {
ResourceTable resource = myResourceTableDao.findOne(theResourceId);
Pageable page = new PageRequest(0, theRemainingCount.get());
Slice<Long> versionIds = myResourceHistoryTableDao.findForResourceId(page, resource.getId(), resource.getVersion());
for (Long nextVersionId : versionIds) {
expungeHistoricalVersion(nextVersionId);
if (theRemainingCount.decrementAndGet() <= 0) {
return;
}
}
}
@Override
public TagList getAllResourceTags(RequestDetails theRequestDetails) {
// Notify interceptors
@ -613,7 +747,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
Integer updatedCount = txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public @NonNull Integer doInTransaction(TransactionStatus theStatus) {
public @NonNull
Integer doInTransaction(TransactionStatus theStatus) {
return myResourceTableDao.markResourcesOfTypeAsRequiringReindexing(resourceType);
}
});
@ -650,7 +785,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doMetaAdd(theMetaAdd, history);
}
ourLog.debug("Processed metaAddOperation on {} in {}ms", new Object[]{theResourceId, w.getMillisAndRestart()});
ourLog.debug("Processed metaAddOperation on {} in {}ms", new Object[] {theResourceId, w.getMillisAndRestart()});
@SuppressWarnings("unchecked")
MT retVal = (MT) metaGetOperation(theMetaAdd.getClass(), theResourceId, theRequestDetails);
@ -684,7 +819,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
myEntityManager.flush();
ourLog.debug("Processed metaDeleteOperation on {} in {}ms", new Object[]{theResourceId.getValue(), w.getMillisAndRestart()});
ourLog.debug("Processed metaDeleteOperation on {} in {}ms", new Object[] {theResourceId.getValue(), w.getMillisAndRestart()});
@SuppressWarnings("unchecked")
MT retVal = (MT) metaGetOperation(theMetaDel.getClass(), theResourceId, theRequestDetails);
@ -1048,6 +1183,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
}
private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
return new ExpungeOutcome()
.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
}
protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
MT retVal;
try {
@ -1099,6 +1239,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return retVal;
}
private Slice<Long> toSlice(ResourceHistoryTable theVersion) {
Validate.notNull(theVersion);
return new SliceImpl<>(Collections.singletonList(theVersion.getId()));
}
private ArrayList<TagDefinition> toTagList(IBaseMetaType theMeta) {
ArrayList<TagDefinition> retVal = new ArrayList<TagDefinition>();

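For orientation, here is a minimal usage sketch of the expunge entry points added above. It is not part of this commit's diff: myPatientDao, patientId, and ourLog are assumed to be an injected IFhirResourceDao&lt;Patient&gt;, an IIdType, and an SLF4J logger, in the style of the test class added later in this commit.

// Illustrative sketch only -- names other than ExpungeOptions/ExpungeOutcome are assumptions
ExpungeOptions options = new ExpungeOptions()
	.setExpungeDeletedResources(true)  // physically remove resources that were deleted
	.setExpungeOldVersions(true);      // physically remove non-current versions
options.setLimit(500);                 // budget of versions to expunge (defaults to 1000)

// Type-level expunge across all Patient resources
ExpungeOutcome typeOutcome = myPatientDao.expunge(options);

// Instance-level expunge of a specific resource (see expunge(IIdType, ExpungeOptions) above)
ExpungeOutcome instanceOutcome = myPatientDao.expunge(patientId.toUnqualifiedVersionless(), options);

ourLog.info("Expunged {} resource versions", typeOutcome.getDeletedCount());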
View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -19,24 +19,35 @@ package ca.uhn.fhir.jpa.dao;
* limitations under the License.
* #L%
*/
import java.util.*;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.servlet.http.HttpServletResponse;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
@ -57,11 +68,9 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
DaoMethodOutcome create(T theResource, String theIfNoneExist);
/**
* @param thePerformIndexing
* Use with caution! If you set this to false, you need to manually perform indexing or your resources
* won't be indexed and searches won't work.
* @param theRequestDetails
* TODO
* @param thePerformIndexing Use with caution! If you set this to false, you need to manually perform indexing or your resources
* won't be indexed and searches won't work.
* @param theRequestDetails TODO
*/
DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails);
@ -76,9 +85,8 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
/**
* This method does not throw an exception if there are delete conflicts, but populates them
* in the provided list
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
DaoMethodOutcome delete(IIdType theResource, List<DeleteConflict> theDeleteConflictsListToPopulate, RequestDetails theRequestDetails);
@ -98,8 +106,14 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
*/
DeleteMethodOutcome deleteByUrl(String theString, RequestDetails theRequestDetails);
ExpungeOutcome expunge(ExpungeOptions theExpungeOptions);
ExpungeOutcome expunge(IIdType theIIdType, ExpungeOptions theExpungeOptions);
TagList getAllResourceTags(RequestDetails theRequestDetails);
<R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType);
Class<T> getResourceType();
TagList getTags(IIdType theResourceId, RequestDetails theRequestDetails);
@ -110,33 +124,29 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
/**
* Not supported in DSTU1!
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
<MT extends IBaseMetaType> MT metaAddOperation(IIdType theId1, MT theMetaAdd, RequestDetails theRequestDetails);
/**
* Not supported in DSTU1!
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
<MT extends IBaseMetaType> MT metaDeleteOperation(IIdType theId1, MT theMetaDel, RequestDetails theRequestDetails);
/**
* Not supported in DSTU1!
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
<MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, IIdType theId, RequestDetails theRequestDetails);
/**
* Not supported in DSTU1!
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
<MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, RequestDetails theRequestDetails);
@ -151,21 +161,17 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
T read(IIdType theId);
/**
*
* @param theId
* @param theRequestDetails
* TODO
* @throws ResourceNotFoundException
* If the ID is not known to the server
* @param theRequestDetails TODO
* @throws ResourceNotFoundException If the ID is not known to the server
*/
T read(IIdType theId, RequestDetails theRequestDetails);
BaseHasResource readEntity(IIdType theId);
/**
* @param theCheckForForcedId
* If true, this method should fail if the requested ID contains a numeric PID which exists, but is
* obscured by a "forced ID" so should not exist as far as the outside world is concerned.
* @param theCheckForForcedId If true, this method should fail if the requested ID contains a numeric PID which exists, but is
* obscured by a "forced ID" so should not exist as far as the outside world is concerned.
*/
BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId);
@ -192,9 +198,8 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
* Takes a map of incoming raw search parameters and translates/parses them into
* appropriate {@link IQueryParameterType} instances of the appropriate type
* for the given param
*
* @throws InvalidRequestException
* If any of the parameters are not known
*
* @throws InvalidRequestException If any of the parameters are not known
*/
void translateRawParameters(Map<String, List<String>> theSource, SearchParameterMap theTarget);
@ -213,35 +218,29 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
DaoMethodOutcome update(T theResource, String theMatchUrl);
/**
* @param thePerformIndexing
* Use with caution! If you set this to false, you need to manually perform indexing or your resources
* won't be indexed and searches won't work.
* @param theRequestDetails
* TODO
* @param thePerformIndexing Use with caution! If you set this to false, you need to manually perform indexing or your resources
* won't be indexed and searches won't work.
* @param theRequestDetails TODO
*/
DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails);
DaoMethodOutcome update(T theResource, String theMatchUrl, RequestDetails theRequestDetails);
/**
* @param theForceUpdateVersion
* Create a new version with the same contents as the current version even if the content hasn't changed (this is mostly useful for
* resources mapping to external content such as external code systems)
* @param theForceUpdateVersion Create a new version with the same contents as the current version even if the content hasn't changed (this is mostly useful for
* resources mapping to external content such as external code systems)
*/
DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequestDetails);
/**
* Not supported in DSTU1!
*
* @param theRequestDetails
* TODO
*
* @param theRequestDetails TODO
*/
MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequestDetails);
RuntimeResourceDefinition validateCriteriaAndReturnResourceDefinition(String criteria);
<R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) ;
// /**
// * Invoke the everything operation
// */

View File

@ -1,8 +1,15 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.Date;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
import javax.persistence.TemporalType;
import java.util.Date;
/*
* #%L
@ -13,9 +20,9 @@ import javax.persistence.TemporalType;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -24,80 +31,49 @@ import javax.persistence.TemporalType;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryTable, Long> {
//@formatter:off
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myUpdated >= :cutoff")
int countForAllResourceTypes(
@Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t")
int countForAllResourceTypes(
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myUpdated >= :cutoff")
int countForResourceInstance(
@Param("id") Long theId,
@Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceId = :id")
int countForResourceInstance(
@Param("id") Long theId
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceType = :type AND t.myUpdated >= :cutoff")
int countForResourceType(
@Param("type") String theType,
@Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t")
int countForAllResourceTypes(
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceId = :id")
int countForResourceInstance(
@Param("id") Long theId
);
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceType = :type")
int countForResourceType(
@Param("type") String theType
);
// @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myUpdated >= :cutoff ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForAllResourceTypes(
// @Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff,
// Pageable thePageable);
//
// @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myUpdated >= :cutoff ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForResourceInstance(
// @Param("id") Long theId,
// @Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff,
// Pageable thePageable);
//
// @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceType = :type AND t.myUpdated >= :cutoff ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForResourceType(
// @Param("type") String theType,
// @Temporal(value=TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff,
// Pageable thePageable);
//
// @Query("SELECT t FROM ResourceHistoryTable t ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForAllResourceTypes(
// Pageable thePageable);
//
// @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForResourceInstance(
// @Param("id") Long theId,
// Pageable thePageable);
//
// @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceType = :type ORDER BY t.myUpdated DESC")
// List<ResourceHistoryTable> findForResourceType(
// @Param("type") String theType,
// Pageable thePageable);
);
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion);
//@formatter:on
@Query("SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion")
Slice<Long> findForResourceId(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("" +
"SELECT v.myId FROM ResourceHistoryTable v " +
"LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " +
"WHERE v.myResourceVersion != t.myVersion AND " +
"t.myResourceType = :restype")
Slice<Long> findIdsOfPreviousVersionsOfResources(Pageable thePage, @Param("restype") String theResourceName);
}

View File

@ -0,0 +1,28 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTag;
import org.springframework.data.jpa.repository.JpaRepository;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IResourceHistoryTagDao extends JpaRepository<ResourceHistoryTag, Long> {
// nothing
}

View File

@ -1,7 +1,13 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
/*
* #%L
* HAPI FHIR JPA Server
@ -11,9 +17,9 @@ import org.springframework.data.domain.Slice;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -21,18 +27,20 @@ import org.springframework.data.domain.Slice;
* limitations under the License.
* #L%
*/
import org.springframework.data.jpa.repository.*;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.ResourceTable;
public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myResourceType = :restype AND t.myDeleted IS NOT NULL")
Slice<Long> findIdsOfDeletedResourcesOfType(Pageable thePageable, @Param("restype") String theResourceName);
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myId = :resid AND t.myResourceType = :restype AND t.myDeleted IS NOT NULL")
Slice<Long> findIdsOfDeletedResourcesOfType(Pageable thePageable, @Param("resid") Long theResourceId, @Param("restype") String theResourceName);
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myIndexStatus IS NULL")
Slice<Long> findUnindexed(Pageable thePageRequest);
@Modifying
@Query("UPDATE ResourceTable r SET r.myIndexStatus = null WHERE r.myResourceType = :restype")
int markResourcesOfTypeAsRequiringReindexing(@Param("restype") String theResourceType);
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myIndexStatus IS NULL")
Slice<Long> findUnindexed(Pageable thePageRequest);
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -104,6 +104,8 @@ public abstract class BaseHasResource {
myPublished = thePublished;
}
public abstract Long getResourceId();
public abstract String getResourceType();
public abstract Collection<? extends BaseTag> getTags();

View File

@ -22,10 +22,7 @@ package ca.uhn.fhir.jpa.entity;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.*;
@MappedSuperclass
public class BaseTag implements Serializable {

View File

@ -137,6 +137,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
myResource = theResource;
}
@Override
public Long getResourceId() {
return myResourceId;
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.OptimisticLock;
@ -401,6 +400,11 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myProfile = theProfile;
}
@Override
public Long getResourceId() {
return getId();
}
// public byte[] getResource() {
// Validate.notNull(myEncoding, "myEncoding is null");
// return myResource;

View File

@ -55,7 +55,6 @@ public abstract class BaseJpaResourceProvider<T extends IBaseResource> extends B
return myDao;
}
//@formatter:off
@History
public IBundleProvider getHistoryForResourceInstance(
HttpServletRequest theRequest,
@ -63,7 +62,6 @@ public abstract class BaseJpaResourceProvider<T extends IBaseResource> extends B
@Since Date theSince,
@At DateRangeParam theAt,
RequestDetails theRequestDetails) {
//@formatter:on
startRequest(theRequest);
try {

View File

@ -45,6 +45,9 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
public static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
public static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
public static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
public static final String LOINC_RADLEX_PART_MAP_ID = "loinc-parts-to-radlex";
public static final String LOINC_RADLEX_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-radlex";
public static final String LOINC_RADLEX_PART_MAP_NAME = "LOINC Part Map to RADLEX";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.";
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
@ -104,6 +107,11 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
loincPartMapUri = LOINC_RXNORM_PART_MAP_URI;
loincPartMapName = LOINC_RXNORM_PART_MAP_NAME;
break;
case "http://www.radlex.org":
loincPartMapId = LOINC_RADLEX_PART_MAP_ID;
loincPartMapUri = LOINC_RADLEX_PART_MAP_URI;
loincPartMapName = LOINC_RADLEX_PART_MAP_NAME;
break;
default:
throw new InternalErrorException("Don't know how to handle mapping to system: " + extCodeSystem);
}

View File

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.util;
public class ExpungeOptions {
private int myLimit = 1000;
private boolean myExpungeOldVersions;
private boolean myExpungeDeletedResources;
/**
* The maximum number of resource versions to expunge
*/
public int getLimit() {
return myLimit;
}
/**
* The maximum number of resource versions to expunge
*/
public void setLimit(int theLimit) {
myLimit = theLimit;
}
public boolean isExpungeDeletedResources() {
return myExpungeDeletedResources;
}
public ExpungeOptions setExpungeDeletedResources(boolean theExpungeDeletedResources) {
myExpungeDeletedResources = theExpungeDeletedResources;
return this;
}
public boolean isExpungeOldVersions() {
return myExpungeOldVersions;
}
public ExpungeOptions setExpungeOldVersions(boolean theExpungeOldVersions) {
myExpungeOldVersions = theExpungeOldVersions;
return this;
}
}
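
Read together with toExpungeOutcome(...) in BaseHapiFhirResourceDao above, the limit behaves as a budget: each expunged version decrements a shared counter, and the outcome reports how much of the budget was spent. A rough sketch of that arithmetic, with made-up numbers for illustration:

// Hypothetical walk-through of the counting -- not code from this commit
ExpungeOptions options = new ExpungeOptions();                   // default limit of 1000
AtomicInteger remaining = new AtomicInteger(options.getLimit());

// Suppose three historical versions get expunged; each one decrements the counter
remaining.addAndGet(-3);                                         // remaining is now 997

// toExpungeOutcome(...) then reports limit - remaining
int deletedCount = options.getLimit() - remaining.get();         // 3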

View File

@ -0,0 +1,14 @@
package ca.uhn.fhir.jpa.util;
public class ExpungeOutcome {
private int myDeletedCount;
public int getDeletedCount() {
return myDeletedCount;
}
public ExpungeOutcome setDeletedCount(int theDeletedCount) {
myDeletedCount = theDeletedCount;
return this;
}
}

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.util;
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.IOException;
@ -64,55 +65,69 @@ public class TestUtil {
if (entity == null) {
continue;
}
ourLog.info("Scanning: {}", clazz.getSimpleName());
scan(clazz, names);
for (Field nextField : clazz.getDeclaredFields()) {
ourLog.info(" * Scanning field: {}", nextField.getName());
scan(nextField, names);
}
scanClass(names, clazz, false);
}
}
private static void scan(AnnotatedElement ae, Set<String> theNames) {
private static void scanClass(Set<String> theNames, Class<?> theClazz, boolean theIsSuperClass) {
ourLog.info("Scanning: {}", theClazz.getSimpleName());
scan(theClazz, theNames, theIsSuperClass);
for (Field nextField : theClazz.getDeclaredFields()) {
ourLog.info(" * Scanning field: {}", nextField.getName());
scan(nextField, theNames, theIsSuperClass);
}
if (theClazz.getSuperclass().equals(Object.class)) {
return;
}
scanClass(theNames, theClazz.getSuperclass(), true);
}
private static void scan(AnnotatedElement ae, Set<String> theNames, boolean theIsSuperClass) {
Table table = ae.getAnnotation(Table.class);
if (table != null) {
assertThat(table.name(), theNames);
assertNotADuplicateName(table.name(), theNames);
for (UniqueConstraint nextConstraint : table.uniqueConstraints()) {
assertThat(nextConstraint.name(), theNames);
assertNotADuplicateName(nextConstraint.name(), theNames);
Validate.isTrue(nextConstraint.name().startsWith("IDX_"), nextConstraint.name() + " must start with IDX_");
}
for (Index nextConstraint : table.indexes()) {
assertThat(nextConstraint.name(), theNames);
assertNotADuplicateName(nextConstraint.name(), theNames);
Validate.isTrue(nextConstraint.name().startsWith("IDX_"), nextConstraint.name() + " must start with IDX_");
}
}
JoinColumn joinColumn = ae.getAnnotation(JoinColumn.class);
if (joinColumn != null) {
assertThat(joinColumn.name(), null);
assertNotADuplicateName(joinColumn.name(), null);
ForeignKey fk = joinColumn.foreignKey();
Validate.notNull(fk);
Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + ae.toString() + " has no name()");
Validate.isTrue(fk.name().startsWith("FK_"));
assertThat(fk.name(), theNames);
if (theIsSuperClass) {
Validate.isTrue(isBlank(fk.name()), "Foreign key on " + ae.toString() + " has a name() and should not as it is a superclass");
} else {
Validate.notNull(fk);
Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + ae.toString() + " has no name()");
Validate.isTrue(fk.name().startsWith("FK_"));
assertNotADuplicateName(fk.name(), theNames);
}
}
Column column = ae.getAnnotation(Column.class);
if (column != null) {
assertThat(column.name(), null);
assertNotADuplicateName(column.name(), null);
}
GeneratedValue gen = ae.getAnnotation(GeneratedValue.class);
SequenceGenerator sg = ae.getAnnotation(SequenceGenerator.class);
Validate.isTrue((gen != null) == (sg != null));
if (gen != null) {
assertThat(gen.generator(), theNames);
assertThat(sg.name(), null);
assertThat(sg.sequenceName(), null);
assertNotADuplicateName(gen.generator(), theNames);
assertNotADuplicateName(sg.name(), null);
assertNotADuplicateName(sg.sequenceName(), null);
assertEquals(gen.generator(), sg.name());
assertEquals(gen.generator(), sg.sequenceName());
}
@ -123,7 +138,10 @@ public class TestUtil {
Validate.isTrue(theGenerator.equals(theName));
}
private static void assertThat(String theName, Set<String> theNames) {
private static void assertNotADuplicateName(String theName, Set<String> theNames) {
if (isBlank(theName)) {
return;
}
Validate.isTrue(theName.length() <= MAX_LENGTH, "Identifier \"" + theName + "\" is " + theName.length() + " chars long");
if (theNames != null) {
Validate.isTrue(theNames.add(theName), "Duplicate name: " + theName);

View File

@ -105,7 +105,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
.build();

View File

@ -0,0 +1,245 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
public class ExpungeR4Test extends BaseResourceProviderR4Test {
private IIdType myOneVersionPatientId;
private IIdType myTwoVersionPatientId;
private IIdType myDeletedPatientId;
private IIdType myOneVersionObservationId;
private IIdType myTwoVersionObservationId;
private IIdType myDeletedObservationId;
private void assertExpunged(IIdType theId) {
try {
getDao(theId).read(theId);
fail();
} catch (ResourceNotFoundException e) {
// good
}
}
private void assertGone(IIdType theId) {
try {
getDao(theId).read(theId);
fail();
} catch (ResourceGoneException e) {
// good
}
}
private void assertStillThere(IIdType theId) {
getDao(theId).read(theId);
}
@Override
@Before
public void before() throws Exception {
super.before();
Patient p = new Patient();
p.setId("PT-ONEVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
p.addIdentifier().setSystem("foo").setValue("bar");
p.addName().setFamily("FAM");
myOneVersionPatientId = myPatientDao.update(p).getId();
p = new Patient();
p.setId("PT-TWOVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
myTwoVersionPatientId = myPatientDao.update(p).getId();
p.setActive(false);
myTwoVersionPatientId = myPatientDao.update(p).getId();
p = new Patient();
p.setId("PT-DELETED");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
myDeletedPatientId = myPatientDao.update(p).getId();
myDeletedPatientId = myPatientDao.delete(myDeletedPatientId).getId();
assertStillThere(myDeletedPatientId.withVersion("1"));
assertGone(myDeletedPatientId.withVersion("2"));
// Observation
Observation o = new Observation();
o.setStatus(Observation.ObservationStatus.FINAL);
myOneVersionObservationId = myObservationDao.create(o).getId();
o = new Observation();
o.setStatus(Observation.ObservationStatus.FINAL);
myTwoVersionObservationId = myObservationDao.create(o).getId();
o.setStatus(Observation.ObservationStatus.AMENDED);
myTwoVersionObservationId = myObservationDao.update(o).getId();
o = new Observation();
o.setStatus(Observation.ObservationStatus.FINAL);
myDeletedObservationId = myObservationDao.create(o).getId();
myDeletedObservationId = myObservationDao.delete(myDeletedObservationId).getId();
}
private IFhirResourceDao<?> getDao(IIdType theId) {
IFhirResourceDao<?> dao;
switch (theId.getResourceType()) {
case "Patient":
dao = myPatientDao;
break;
case "Observation":
dao = myObservationDao;
break;
default:
fail("Restype: " + theId.getResourceType());
dao = myPatientDao;
}
return dao;
}
@Test
public void testExpungeInstanceOldVersionsAndDeleted() {
Patient p = new Patient();
p.setId("PT-TWOVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
p.addName().setFamily("FOO");
myPatientDao.update(p).getId();
myPatientDao.expunge(myTwoVersionPatientId.toUnqualifiedVersionless(), new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
// Patients
assertStillThere(myOneVersionPatientId);
assertExpunged(myTwoVersionPatientId.withVersion("1"));
assertExpunged(myTwoVersionPatientId.withVersion("2"));
assertStillThere(myTwoVersionPatientId.withVersion("3"));
assertGone(myDeletedPatientId);
// No observations deleted
assertStillThere(myOneVersionObservationId);
assertStillThere(myTwoVersionObservationId.withVersion("1"));
assertStillThere(myTwoVersionObservationId.withVersion("2"));
assertGone(myDeletedObservationId);
}
@Test
public void testExpungeInstanceVersionCurrentVersion() {
try {
myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
fail();
} catch (PreconditionFailedException e) {
assertEquals("Can not perform version-specific expunge of resource Patient/PT-TWOVERSION/_history/2 as this is the current version", e.getMessage());
}
}
@Test
public void testExpungeInstanceVersionOldVersionsAndDeleted() {
Patient p = new Patient();
p.setId("PT-TWOVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
p.addName().setFamily("FOO");
myPatientDao.update(p).getId();
myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
// Patients
assertStillThere(myOneVersionPatientId);
assertStillThere(myTwoVersionPatientId.withVersion("1"));
assertExpunged(myTwoVersionPatientId.withVersion("2"));
assertStillThere(myTwoVersionPatientId.withVersion("3"));
assertGone(myDeletedPatientId);
// No observations deleted
assertStillThere(myOneVersionObservationId);
assertStillThere(myTwoVersionObservationId.withVersion("1"));
assertStillThere(myTwoVersionObservationId.withVersion("2"));
assertGone(myDeletedObservationId);
}
@Test
public void testExpungeTypeDeletedResources() {
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(false));
// Only deleted and prior patients
assertStillThere(myOneVersionPatientId);
assertStillThere(myTwoVersionPatientId.withVersion("1"));
assertStillThere(myTwoVersionPatientId.withVersion("2"));
assertExpunged(myDeletedPatientId);
// No observations deleted
assertStillThere(myOneVersionObservationId);
assertStillThere(myTwoVersionObservationId.withVersion("1"));
assertStillThere(myTwoVersionObservationId.withVersion("2"));
assertGone(myDeletedObservationId);
}
@Test
public void testExpungeTypeOldVersions() {
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(false)
.setExpungeOldVersions(true));
// Only deleted and prior patients
assertStillThere(myOneVersionPatientId);
assertExpunged(myTwoVersionPatientId.withVersion("1"));
assertStillThere(myTwoVersionPatientId.withVersion("2"));
assertExpunged(myDeletedPatientId.withVersion("1"));
assertGone(myDeletedPatientId);
// No observations deleted
assertStillThere(myOneVersionObservationId);
assertStillThere(myTwoVersionObservationId.withVersion("1"));
assertStillThere(myTwoVersionObservationId.withVersion("2"));
assertGone(myDeletedObservationId);
}
@Test
public void testExpungeTypeOldVersionsAndDeleted() {
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
// Only deleted and prior patients
assertStillThere(myOneVersionPatientId);
assertExpunged(myTwoVersionPatientId.withVersion("1"));
assertStillThere(myTwoVersionPatientId.withVersion("2"));
assertExpunged(myDeletedPatientId);
// No observations deleted
assertStillThere(myOneVersionObservationId);
assertStillThere(myTwoVersionObservationId.withVersion("1"));
assertStillThere(myTwoVersionObservationId.withVersion("2"));
assertGone(myDeletedObservationId);
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
}